
Commit

Merge pull request #10 from niivue/lint
Lint
neurolabusc authored May 13, 2024
2 parents 411c541 + 39b6de6 commit 9e181ba
Showing 4 changed files with 394 additions and 389 deletions.
86 changes: 43 additions & 43 deletions brainchop-telemetry.js → brainchop-diagnostics.js
@@ -1,4 +1,4 @@
export {isChrome, localSystemDetails }
export { isChrome, localSystemDetails }

async function detectBrowser() {
if (navigator.userAgent.indexOf('OPR/') > -1) {
@@ -55,10 +55,10 @@ async function detectOperatingSys() {
}

async function checkWebGl2(gl) {
//const gl = document.createElement('canvas').getContext('webgl2')
// const gl = document.createElement('canvas').getContext('webgl2')
if (!gl) {
if (typeof WebGL2RenderingContext !== 'undefined') {
const msg = 'WebGL2 may be disabled. Please try updating video card drivers'
console.log('WebGL2 may be disabled. Please try updating video card drivers')
} else {
console.log('WebGL2 is not supported')
}
@@ -70,7 +70,7 @@ async function checkWebGl2(gl) {
}

async function detectGPUVendor(gl) {
//const gl = document.createElement('canvas').getContext('webgl')
// const gl = document.createElement('canvas').getContext('webgl')
let debugInfo
if (gl) {
debugInfo = gl.getExtension('WEBGL_debug_renderer_info')
@@ -87,7 +87,7 @@ async function detectGPUVendor_v0(gl) {
}

async function detectGPUVendor_v0(gl) {
//const gl = document.createElement('canvas').getContext('webgl')
// const gl = document.createElement('canvas').getContext('webgl')
if (gl) {
const debugInfo = gl.getExtension('WEBGL_debug_renderer_info')
return debugInfo ? gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL) : null
@@ -149,15 +149,15 @@ async function isChrome() {
}

async function localSystemDetails(statData, gl = null) {
// -- Timing data to collect
const today = new Date()
if (statData.isModelFullVol) {
statData.Brainchop_Ver = 'FullVolume'
} else {
statData.Brainchop_Ver = 'SubVolumes'
}
// -- Timing data to collect
const today = new Date()
if (statData.isModelFullVol) {
statData.Brainchop_Ver = 'FullVolume'
} else {
statData.Brainchop_Ver = 'SubVolumes'
}

/* let geoData = getBrowserLocationInfo()
/* let geoData = getBrowserLocationInfo()
if(geoData) {
statData["Country"] = geoData["Country"]
statData["State"] = geoData["Region"]
@@ -167,34 +167,34 @@ async function localSystemDetails(statData, gl = null) {
statData["State"] = ""
statData["City"] = ""
} */
statData.Total_t = (Date.now() - statData.startTime) / 1000.0
delete statData.startTime
statData.Date = parseInt(today.getMonth() + 1) + '/' + today.getDate() + '/' + today.getFullYear()
statData.Browser = await detectBrowser()
statData.Browser_Ver = await detectBrowserVersion()
statData.OS = await detectOperatingSys()
statData.WebGL2 = await checkWebGl2(gl)
statData.GPU_Vendor = await detectGPUVendor(gl)
statData.GPU_Card = await detectGPUCardType(gl)
statData.GPU_Vendor_Full = await detectGPUVendor_v0(gl)
statData.GPU_Card_Full = await detectGPUCardType_v0(gl)
statData.CPU_Cores = await getCPUNumCores()
statData.Which_Brainchop = 'latest'
if (await isChrome()) {
statData.Heap_Size_MB = window.performance.memory.totalJSHeapSize / (1024 * 1024).toFixed(2)
statData.Used_Heap_MB = window.performance.memory.usedJSHeapSize / (1024 * 1024).toFixed(2)
statData.Heap_Limit_MB = window.performance.memory.jsHeapSizeLimit / (1024 * 1024).toFixed(2)
}
if (gl) {
console.log('MAX_TEXTURE_SIZE :', gl.getParameter(gl.MAX_TEXTURE_SIZE))
console.log('MAX_RENDERBUFFER_SIZE :', gl.getParameter(gl.MAX_RENDERBUFFER_SIZE))
// -- check to see if machine has two graphics card: one is the builtin e.g. Intel Iris Pro, the other is NVIDIA GeForce GT 750M.
// -- check browser use which one, if debugInfo is null then installed GPU is not used
const debugInfo = gl.getExtension('WEBGL_debug_renderer_info')
console.log('VENDOR WEBGL:', gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL))
statData.Texture_Size = gl.getParameter(gl.MAX_TEXTURE_SIZE) // --returns the maximum dimension the GPU can address
} else {
statData.Texture_Size = null
}
return statData
statData.Total_t = (Date.now() - statData.startTime) / 1000.0
delete statData.startTime
statData.Date = parseInt(today.getMonth() + 1) + '/' + today.getDate() + '/' + today.getFullYear()
statData.Browser = await detectBrowser()
statData.Browser_Ver = await detectBrowserVersion()
statData.OS = await detectOperatingSys()
statData.WebGL2 = await checkWebGl2(gl)
statData.GPU_Vendor = await detectGPUVendor(gl)
statData.GPU_Card = await detectGPUCardType(gl)
statData.GPU_Vendor_Full = await detectGPUVendor_v0(gl)
statData.GPU_Card_Full = await detectGPUCardType_v0(gl)
statData.CPU_Cores = await getCPUNumCores()
statData.Which_Brainchop = 'latest'
if (await isChrome()) {
statData.Heap_Size_MB = window.performance.memory.totalJSHeapSize / (1024 * 1024).toFixed(2)
statData.Used_Heap_MB = window.performance.memory.usedJSHeapSize / (1024 * 1024).toFixed(2)
statData.Heap_Limit_MB = window.performance.memory.jsHeapSizeLimit / (1024 * 1024).toFixed(2)
}
if (gl) {
console.log('MAX_TEXTURE_SIZE :', gl.getParameter(gl.MAX_TEXTURE_SIZE))
console.log('MAX_RENDERBUFFER_SIZE :', gl.getParameter(gl.MAX_RENDERBUFFER_SIZE))
// -- check to see if machine has two graphics card: one is the builtin e.g. Intel Iris Pro, the other is NVIDIA GeForce GT 750M.
// -- check browser use which one, if debugInfo is null then installed GPU is not used
const debugInfo = gl.getExtension('WEBGL_debug_renderer_info')
console.log('VENDOR WEBGL:', gl.getParameter(debugInfo.UNMASKED_VENDOR_WEBGL))
statData.Texture_Size = gl.getParameter(gl.MAX_TEXTURE_SIZE) // --returns the maximum dimension the GPU can address
} else {
statData.Texture_Size = null
}
return statData
}
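
The renamed brainchop-diagnostics.js module keeps the same two exports. Below is a minimal usage sketch, not part of this diff: it assumes a statData object carrying the startTime and isModelFullVol fields the function reads, plus an optional WebGL2 context.

```js
// Hypothetical caller of the renamed module; the field names follow what
// localSystemDetails() reads in the diff above.
import { isChrome, localSystemDetails } from './brainchop-diagnostics.js'

const gl = document.createElement('canvas').getContext('webgl2')
let statData = { startTime: Date.now(), isModelFullVol: true }
statData = await localSystemDetails(statData, gl)
console.log('Chrome:', await isChrome())
console.log('Collected diagnostics:', statData)
```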
77 changes: 45 additions & 32 deletions brainchop-mainthread.js
@@ -1,7 +1,7 @@
import * as tf from '@tensorflow/tfjs'
import { BWLabeler } from './bwlabels.js'
import { inferenceModelsList } from "./brainchop-parameters.js"
export { runInference}
import { inferenceModelsList } from './brainchop-parameters.js'
export { runInference }

async function getModelNumParameters(modelObj) {
let numParameters = 0
@@ -151,43 +151,56 @@ async function minMaxNormalizeVolumeData(volumeData) {
}

async function inferenceFullVolumeSeqCovLayer(
model,
slices_3d,
input_shape,
isChannelLast,
num_of_slices,
slice_height,
slice_width
_model,
_slices_3d,
_input_shape,
_isChannelLast,
_num_of_slices,
_slice_height,
_slice_width
) {
window.alert('inferenceFullVolumeSeqCovLayer() is not dead code?')
}

async function inferenceFullVolume(
model,
slices_3d,
input_shape,
isChannelLast,
num_of_slices,
slice_height,
slice_width
_model,
_slices_3d,
_input_shape,
_isChannelLast,
_num_of_slices,
_slice_height,
_slice_width
) {
window.alert('inferenceFullVolume() is not dead code?')
}

async function inferenceSubVolumes(model, slices_3d, num_of_slices, slice_height, slice_width, pipeline1_out = null) {
async function inferenceSubVolumes(
_model,
_slices_3d,
_num_of_slices,
_slice_height,
_slice_width,
_pipeline1_out = null
) {
window.alert('inferenceSubVolumes() is not dead code?')
}

async function tensor2LightBuffer(tensor, dtype) {
async function tensor2LightBuffer(_tensor, _dtype) {
window.alert('tensor2LightBuffer() is not dead code?')
// return new Buffer(tensor.shape, dtype, Array.from(tensor.dataSync()) );
}

async function draw3dObjBoundingVolume(unstackOutVolumeTensor) {
async function draw3dObjBoundingVolume(_unstackOutVolumeTensor) {
window.alert('draw3dObjBoundingVolume() is not dead code?')
}

async function argMaxLarge(outVolumeBuffer, num_of_slices, slice_height, slice_width, numOfClasses, dtype = 'float32') {
async function argMaxLarge(
_outVolumeBuffer,
_num_of_slices,
_slice_height,
_slice_width,
_numOfClasses,
_dtype = 'float32'
) {
window.alert('argMaxLarge() is not dead code?')
}

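The underscore-prefixed arguments above, and the const/prefer-const changes later in this diff, are typical of an ESLint clean-up. The repository's actual lint configuration is not part of this commit, so the fragment below is only an illustrative guess at rules that would produce these edits.

```js
// Hypothetical .eslintrc.cjs fragment (not from this repository): unused
// variables and arguments pass only when prefixed with '_', and `let`
// declarations that are never reassigned must become `const`.
module.exports = {
  rules: {
    'no-unused-vars': ['error', { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }],
    'prefer-const': 'error'
  }
}
```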
@@ -202,8 +215,7 @@ async function removeZeroPaddingFrom3dTensor(tensor3d, rowPad = 1, colPad = 1, d
if (tensor3d.rank !== 3) {
throw new Error('Tensor must be 3D')
}
let h, w, d
;[h, w, d] = tensor3d.shape
const [h, w, d] = tensor3d.shape
return tensor3d.slice([rowPad, colPad, depthPad], [h - 2 * rowPad, w - 2 * colPad, d - 2 * depthPad])
}

@@ -298,11 +310,11 @@ async function generateBrainMask(
// Use this conversion to download output slices as nii file. Takes around 30 ms
// does not use `push` to avoid stack overflows. In future: consider .set() with typed arrays
const allOutputSlices3DCC1DimArray = new Array(allOutputSlices3DCC[0].length * allOutputSlices3DCC.length)
let index = 0;
let index = 0
for (let sliceIdx = 0; sliceIdx < allOutputSlices3DCC.length; sliceIdx++) {
for (let i = 0; i < allOutputSlices3DCC[sliceIdx].length; i++) {
allOutputSlices3DCC1DimArray[index++] = allOutputSlices3DCC[sliceIdx][i];
}
for (let i = 0; i < allOutputSlices3DCC[sliceIdx].length; i++) {
allOutputSlices3DCC1DimArray[index++] = allOutputSlices3DCC[sliceIdx][i]
}
}
let brainOut = []

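The comment in the hunk above flags a possible future change: flattening the per-slice arrays with typed arrays and .set() rather than an index loop. A hedged sketch of that alternative, assuming every slice is a plain numeric array of equal length:

```js
// Sketch only; not part of this commit. Flatten allOutputSlices3DCC into a
// typed array, copying each slice with one .set() call instead of per element.
const sliceLen = allOutputSlices3DCC[0].length
const flat = new Float32Array(sliceLen * allOutputSlices3DCC.length)
for (let sliceIdx = 0; sliceIdx < allOutputSlices3DCC.length; sliceIdx++) {
  flat.set(allOutputSlices3DCC[sliceIdx], sliceIdx * sliceLen)
}
```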
@@ -315,7 +327,8 @@ async function generateBrainMask(
// Brain tissue
window.alert('getAllSlicesData1D() is not dead code? niftiHeader and niftiImage required by getAllSlicesData1D')
}
if (isFinalImage || opts.showPhase1Output) {//all done
if (isFinalImage || opts.showPhase1Output) {
// all done
callbackImg(brainOut, opts, modelEntry)
callbackUI('Segmentation finished', 0)
}
@@ -495,7 +508,7 @@ class SequentialConvLayer {
async apply(inputTensor) {
const oldDeleteTextureThreshold = tf.ENV.get('WEBGL_DELETE_TEXTURE_THRESHOLD')
tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 0)

// eslint-disable-next-line @typescript-eslint/no-this-alias
const self = this
// Important to avoid "undefined" class var members inside the timer.
// "this" has another meaning inside the timer.
@@ -555,7 +568,7 @@ class SequentialConvLayer {
self.callbackUI(`Iteration ${chIdx}`, chIdx / self.outChannels)
console.log(`Number of Tensors: ${memoryInfo.numTensors}`)
console.log(`Number of Data Buffers: ${memoryInfo.numDataBuffers}`)

console.log(`Megabytes In Use: ${(memoryInfo.numBytes / 1048576).toFixed(3)} MB`)
if (memoryInfo.unreliable) {
console.log(`Unreliable: ${memoryInfo.unreliable}`)
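
The logging in the hunk above reads TensorFlow.js memory statistics; the same numbers can be inspected outside the class through the public tf.memory() call. A standalone sketch, not part of this commit:

```js
// tf.memory() reports live tensor counts and bytes held by the backend; the
// fields used here match those logged by SequentialConvLayer.apply() above.
import * as tf from '@tensorflow/tfjs'

const memoryInfo = tf.memory()
console.log(`Number of Tensors: ${memoryInfo.numTensors}`)
console.log(`Megabytes In Use: ${(memoryInfo.numBytes / 1048576).toFixed(3)} MB`)
if (memoryInfo.unreliable) {
  console.log(`Unreliable: ${memoryInfo.unreliable}`)
}
```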
@@ -632,7 +645,7 @@ async function generateOutputSlicesV2(
const conn = 26 // Example connectivity
const binarize = true
const onlyLargestClusterPerClass = true
const [labelCount, labeledImage] = BWInstance.bwlabel(img, dim, conn, binarize, onlyLargestClusterPerClass)
const [_labelCount, labeledImage] = BWInstance.bwlabel(img, dim, conn, binarize, onlyLargestClusterPerClass)
for (let i = 0; i < img.length; i++) {
img[i] *= labeledImage[i]
}
@@ -955,7 +968,7 @@ async function inferenceFullVolumeSeqCovLayerPhase2(
const seqConvLayer = await new SequentialConvLayer(res, 10, isChannelLast, callbackUI)

// Apply the last output tensor to the seq. instance
let outputTensor = await seqConvLayer.apply(curTensor[i])
const outputTensor = await seqConvLayer.apply(curTensor[i])

// -- document.getElementById("progressBarChild").style.width = 0 + "%";;

