It sounds like you are not properly importing the NRRDLoader. Depending on your setup, you should probably call something like:
<script type="text/javascript" src="https://cdn.rawgit.com/mrdoob/three.js/master/examples/js/loaders/NRRDLoader.js"></script>
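If the NRRDLoader script itself is already included, the "THREE.Volume is not a constructor" error usually means the Volume helper from the three.js examples is not loaded either; a guess at the extra include that may be needed (the exact path depends on your three.js version):
<script type="text/javascript" src="https://cdn.rawgit.com/mrdoob/three.js/master/examples/js/misc/Volume.js"></script>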
However, AMI already supports NRRD out of the box, so you do not need to use the three.js NRRD loader.
Just load the NRRD file as a regular volume and you should be fine. https://github.com/FNNDSC/ami/blob/dev/examples/loader_nrrd/loader_nrrd.js#L97-L107
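For reference, here is a minimal sketch of that approach (the container element, the file URL and the target scene are placeholders for your own setup):
// minimal sketch: load an NRRD file as a regular AMI volume
// 'container' and the .nrrd URL are placeholders for your own setup
var container = document.getElementById('container');
var loader = new LoadersVolume(container);
loader.load('http://myserver/data/labels.nrrd')
  .then(function() {
    // merge the parsed frames into series/stacks
    var series = loader.data[0].mergeSeries(loader.data)[0];
    var stack = series.stack[0];
    loader.free();
    loader = null;
    // visualize it with a stack helper
    var stackHelper = new HelpersStack(stack);
    scene.add(stackHelper); // 'scene' is whatever THREE.Scene you render
  })
  .catch(function(error) {
    window.console.log('oops... something went wrong...', error);
  });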
Thank you for your response.
What I want is to make a label mesh as shown in https://fnndsc.github.io/ami/#viewers_quadview, but using an NRRD file (from ITK-SNAP, which has header information) as the label instead of an .stl file.
When I use LoadersVolume()
to load my NRRD file as you mentioned, I got the following error:
dicomParser.readPart10Header: DICM prefix not found at location 132 - this is not a valid DICOM P10 file.
loaders.volume.js:112 parsers.dicom could not parse the file
loaders.base.js:232 oops... something went wrong...
loaders.base.js:233 parsers.dicom could not parse the file
It seems the loader tries to parse my NRRD file as a DICOM P10 file.
Is this because my NRRD file cannot be parsed successfully? Do the label and the background stack images have to be in the same format (e.g. all DICOM)?
Much appreciated!
AMI uses the file extension (.dcm, .nii, etc.) to know which parser to apply.
If the file has no extension, it will run the DICOM parser by default.
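For illustration, the routing works roughly like this (hypothetical URLs):
// illustration only - hypothetical URLs; the extension at the end of the
// path decides which parser AMI applies
var loader = new LoadersVolume();
loader.load('http://myserver/volumes/scan.nrrd');    // -> NRRD parser
loader.load('http://myserver/volumes/brain.nii.gz'); // -> NIfTI parser
loader.load('http://myserver/files/1234/download');  // no extension -> DICOM parser by default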
What is the full name of the file you are trying to load?
I see, that might be the reason, because it is an API call identified by the file ID
(e.g.
files='http://localhost:8080/api/v1/file/128122102131232/download?contentDisposition=attachment';
var loader = new LoadersVolume();
loader.load(files);
) instead of a file with a '.nrrd' extension.
I need to think about a way to work around that.
Thank you again!
One way around it is to extend https://github.com/FNNDSC/ami/blob/dev/src/core/core.utils.js#L119 a bit.
Add something similar to this: https://github.com/FNNDSC/ami/blob/dev/src/core/core.utils.js#L160-L163,
but instead of contentType=application%2Fdicom
have contentType=application%2Fnrrd,
and replace the file URL with:
files='http://localhost:8080/api/v1/file/128122102131232/download?contentDisposition=attachment&contentType=application/nrrd';
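Just as a sketch, something along these lines (a hypothetical standalone helper, not AMI's actual code - the real change would go into the query-string handling linked above):
// hypothetical sketch: derive a parser extension from a contentType query
// parameter when the URL itself has no file extension
function guessExtensionFromContentType(url) {
  var query = url.split('?')[1] || '';
  // accept both encoded (application%2Fnrrd) and plain (application/nrrd) forms
  if (/contentType=application(%2F|\/)nrrd/i.test(query)) {
    return 'nrrd';
  }
  if (/contentType=application(%2F|\/)dicom/i.test(query)) {
    return 'dicom';
  }
  return null; // fall back to the default behaviour (DICOM)
}

// usage with the URL above
var url = 'http://localhost:8080/api/v1/file/128122102131232/download' +
  '?contentDisposition=attachment&contentType=application/nrrd';
console.log(guessExtensionFromContentType(url)); // 'nrrd'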
would that work for you?
That is a good idea! I will try it and let you know. Appreciated!
This way, the NRRD file can be parsed correctly!
Thank you for your patience!
@NicolasRannou May I ask one more question? After trying to use the NRRD image as a mask on top of the DICOM, I got an interesting result that looks like this:
It looks like the 36 frames of the NRRD image are ordered with every 4 sequential slices packed into one, like:
I attached my code below. I tried asking this on Stack Overflow but it seems no one has an answer, so could you please give me a hint about why this happens? I appreciate it so much!
function init() {
function animate() {
controls.update();
// render first layer offscreen
renderer.render(sceneLayer0, camera, sceneLayer0TextureTarget, true);
// render second layer offscreen
renderer.render(sceneLayer1, camera, sceneLayer1TextureTarget, true);
// mix the layers and render it ON screen!
renderer.render(sceneLayerMix, camera);
statsyay.update();
// request new frame
requestAnimationFrame(function() {
animate();
});
}
// renderer
threeD = document.getElementById('container');
renderer = new THREE.WebGLRenderer({
antialias: true,
alpha: true,
});
renderer.setSize(threeD.clientWidth, threeD.clientHeight);
renderer.setClearColor(0x607D8B, 1);
threeD.appendChild(renderer.domElement);
// stats
statsyay = new Stats();
threeD.appendChild(statsyay.domElement);
// scene
sceneLayer0 = new THREE.Scene();
sceneLayer1 = new THREE.Scene();
sceneLayerMix = new THREE.Scene();
// render to texture!!!!
sceneLayer0TextureTarget = new THREE.WebGLRenderTarget(
threeD.clientWidth,
threeD.clientHeight,
{minFilter: THREE.LinearFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBAFormat,
});
sceneLayer1TextureTarget = new THREE.WebGLRenderTarget(
threeD.clientWidth,
threeD.clientHeight,
{minFilter: THREE.LinearFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBAFormat,
});
// camera
camera = new CamerasOrthographic(
threeD.clientWidth / -2, threeD.clientWidth / 2,
threeD.clientHeight / 2, threeD.clientHeight / -2,
0.1, 10000);
// controls
controls = new ControlsOrthographic(camera, threeD);
controls.staticMoving = true;
controls.noRotate = true;
camera.controls = controls;
animate();
}
// init threeJS...
init();
let t2 = [
'59c93386e57f6325439d74ac', '59c93386e57f6325439d74af', '59c93387e57f6325439d74b2', '59c93387e57f6325439d74b5', '59c93387e57f6325439d74b8',
'59c93387e57f6325439d74bb', '59c93387e57f6325439d74be', '59c93388e57f6325439d74c1', '59c93388e57f6325439d74c4', '59c93388e57f6325439d74c7',
'59c93388e57f6325439d74ca', '59c93388e57f6325439d74cd', '59c93389e57f6325439d74d0', '59c93389e57f6325439d74d3', '59c93389e57f6325439d74d6',
'59c93389e57f6325439d74d9', '59c93389e57f6325439d74dc', '59c9338ae57f6325439d74df', '59c9338ae57f6325439d74e2', '59c9338ae57f6325439d74e5',
'59c9338ae57f6325439d74e8', '59c9338ae57f6325439d74eb', '59c9338be57f6325439d74ee', '59c9338be57f6325439d74f1', '59c9338be57f6325439d74f4',
'59c9338be57f6325439d74f7', '59c9338be57f6325439d74fa', '59c9338be57f6325439d74fd', '59c9338ce57f6325439d7500', '59c9338ce57f6325439d7503',
'59c9338ce57f6325439d7506', '59c9338ce57f6325439d7509', '59c9338de57f6325439d750c', '59c9338de57f6325439d750f', '59c9338de57f6325439d7512',
'59c9338de57f6325439d7515'
];
let files = t2.map(function(v) {
return 'http://myserver/file/'+v+'/download?contentDisposition=attachment';
});
files.push('http://myserver/file/59c93359e57f6325439d74a8/download?contentDisposition=attachment&contentType=application%2Fnrrd');
// load sequence for each file
// instantiate the loader
// it loads and parses the dicom image
var loader = new LoadersVolume(threeD);
/**
* Build GUI
*/
function buildGUI(stackHelper) {
/**
* Update Layer 1
*/
function updateLayer1() {
// update layer1 geometry...
if (meshLayer1) {
// dispose geometry first
meshLayer1.geometry.dispose();
meshLayer1.geometry = stackHelper.slice.geometry;
meshLayer1.geometry.verticesNeedUpdate = true;
}
}
/**
* Update layer mix
*/
function updateLayerMix() {
// update layer1 geometry...
if (meshLayerMix) {
sceneLayerMix.remove(meshLayerMix);
meshLayerMix.material.dispose();
meshLayerMix.material = null;
meshLayerMix.geometry.dispose();
meshLayerMix.geometry = null;
// add mesh in this scene with right shaders...
meshLayerMix = new THREE.Mesh(
stackHelper.slice.geometry, materialLayerMix);
// go to the LPS space
meshLayerMix.applyMatrix(stackHelper.stack._ijk2LPS);
sceneLayerMix.add(meshLayerMix);
}
}
var stack = stackHelper.stack;
var gui = new dat.GUI({
autoPlace: false,
});
var customContainer = document.getElementById('my-gui-container');
customContainer.appendChild(gui.domElement);
var layer0Folder = gui.addFolder('CT');
layer0Folder.add(stackHelper.slice, 'invert');
var lutUpdate = layer0Folder.add(
stackHelper.slice, 'lut', lutLayer0.lutsAvailable());
lutUpdate.onChange(function(value) {
lutLayer0.lut = value;
stackHelper.slice.lutTexture = lutLayer0.texture;
});
var indexUpdate = layer0Folder.add(
stackHelper, 'index', 0, stack.dimensionsIJK.z - 1).step(1).listen();
indexUpdate.onChange(function() {
updateLayer1();
updateLayerMix();
});
layer0Folder.add(
stackHelper.slice, 'interpolation', 0, 1).step(1).listen();
layer0Folder.open();
// layer mix folder
var layerMixFolder = gui.addFolder('Segmentation');
var opacityLayerMix1 = layerMixFolder.add(
layerMix, 'opacity1', 0, 1).step(0.01);
opacityLayerMix1.onChange(function(value) {
uniformsLayerMix.uOpacity1.value = value;
});
layerMixFolder.open();
// hook up callbacks
controls.addEventListener('OnScroll', function(e) {
if (e.delta > 0) {
if (stackHelper.index >= stack.dimensionsIJK.z - 1) {
return false;
}
stackHelper.index += 1;
} else {
if (stackHelper.index <= 0) {
return false;
}
stackHelper.index -= 1;
}
updateLayer1();
updateLayerMix();
});
updateLayer1();
updateLayerMix();
/**
* Handle window resize
*/
function onWindowResize() {
var threeD = document.getElementById('container');
camera.canvas = {
width: threeD.clientWidth,
height: threeD.clientHeight,
};
camera.fitBox(2);
renderer.setSize(threeD.clientWidth, threeD.clientHeight);
}
window.addEventListener('resize', onWindowResize, false);
onWindowResize();
}
/**
* Handle series
*/
function handleSeries() {
//
//
// first stack of first series
console.log(loader.data[0].mergeSeries(loader.data));
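// mergeSeries groups the loaded files into series; based on the load list
// above, mergedSeries[0] is assumed to be the DICOM background series and
// mergedSeries[1] the NRRD label map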
var mergedSeries = loader.data[0].mergeSeries(loader.data);
var stack = mergedSeries[0].stack[0];
console.log(stack._rescaleSlope);
var stack2 = mergedSeries[1].stack[0];
console.log(stack2._rescaleSlope);
console.log(stack);
console.log(stack2);
loader.free();
loader = null;
console.log(stack._rescaleSlope);
console.log(stack2._rescaleSlope);
var stackHelper = new HelpersStack(stack);
// stackHelper.bbox.visible = false;
// stackHelper.border.visible = false;
stackHelper.index = 10;
console.log(stackHelper);
sceneLayer0.add(stackHelper);
stack2.prepare();
// pixels packing for the fragment shaders now happens there
stack2.pack();
console.log(stack2._rawData.length);
var textures2 = [];
for (var m = 0; m < stack2._rawData.length; m++) {
var tex = new THREE.DataTexture(
stack2.rawData[m],
stack2.textureSize, //4096
stack2.textureSize,
stack2.textureType, //1023
THREE.UnsignedByteType,
THREE.UVMapping,
THREE.ClampToEdgeWrapping,
THREE.ClampToEdgeWrapping,
THREE.NearestFilter,
THREE.NearestFilter);
tex.needsUpdate = true;
tex.flipY = true;
textures2.push(tex);
}
console.log(textures2);
// create material && mesh then add it to sceneLayer1
uniformsLayer1 = ShadersDataUniforms.uniforms();
uniformsLayer1.uTextureSize.value = stack2.textureSize; //4096
uniformsLayer1.uTextureContainer.value = textures2;
uniformsLayer1.uWorldToData.value = stack2.lps2IJK;
uniformsLayer1.uNumberOfChannels.value = stack2.numberOfChannels; //1
uniformsLayer1.uPixelType.value = stack2.pixelType; //0
uniformsLayer1.uBitsAllocated.value = stack2.bitsAllocated; // 16
uniformsLayer1.uWindowCenterWidth.value =
[stack2.windowCenter, stack2.windowWidth];
uniformsLayer1.uRescaleSlopeIntercept.value =
[stack2.rescaleSlope, stack2.rescaleIntercept];
uniformsLayer1.uDataDimensions.value = [stack2.dimensionsIJK.x, //672,672,36
stack2.dimensionsIJK.y,
stack2.dimensionsIJK.z];
uniformsLayer1.uInterpolation.value = 0;
// generate shaders on-demand!
var fs = new ShadersDataFragment(uniformsLayer1);
var vs = new ShadersDataVertex();
materialLayer1 = new THREE.ShaderMaterial(
{side: THREE.DoubleSide,
uniforms: uniformsLayer1,
vertexShader: vs.compute(),
fragmentShader: fs.compute(),
});
console.log(stackHelper);
console.log(materialLayer1);
// add mesh in this scene with right shaders...
meshLayer1 = new THREE.Mesh(stackHelper.slice.geometry, materialLayer1);
// go to the LPS space
console.log(meshLayer1);
meshLayer1.applyMatrix(stack._ijk2LPS);
sceneLayer1.add(meshLayer1);
// Create the Mix layer
uniformsLayerMix = ShadersLayerUniforms.uniforms();
uniformsLayerMix.uTextureBackTest0.value = sceneLayer0TextureTarget.texture;
uniformsLayerMix.uTextureBackTest1.value = sceneLayer1TextureTarget.texture;
let fls = new ShadersLayerFragment(uniformsLayerMix);
let vls = new ShadersLayerVertex();
materialLayerMix = new THREE.ShaderMaterial(
{side: THREE.DoubleSide,
uniforms: uniformsLayerMix,
vertexShader: vls.compute(),
fragmentShader: fls.compute(),
transparent: true,
});
// add mesh in this scene with right shaders...
meshLayerMix = new THREE.Mesh(stackHelper.slice.geometry, materialLayerMix);
// go to the LPS space
meshLayerMix.applyMatrix(stack._ijk2LPS);
sceneLayerMix.add(meshLayerMix);
//
// set camera
var worldbb = stack.worldBoundingBox();
var lpsDims = new THREE.Vector3(
worldbb[1] - worldbb[0],
worldbb[3] - worldbb[2],
worldbb[5] - worldbb[4]
);
// box: {halfDimensions, center}
var box = {
center: stack.worldCenter().clone(),
halfDimensions:
new THREE.Vector3(lpsDims.x + 10, lpsDims.y + 10, lpsDims.z + 10),
};
// init and zoom
var canvas = {
width: threeD.clientWidth,
height: threeD.clientHeight,
};
camera.directions = [stack.xCosine, stack.yCosine, stack.zCosine];
camera.box = box;
camera.canvas = canvas;
camera.update();
camera.fitBox(2);
// CREATE LUT
lutLayer0 = new HelpersLut(
'my-lut-canvases-l0',
'default',
'linear',
[[0, 0, 0, 0], [1, 1, 1, 1]],
[[0, 1], [1, 1]]);
lutLayer0.luts = HelpersLut.presetLuts();
lutLayer1 = new HelpersLut(
'my-lut-canvases-l1',
'default',
'linear',
stack2.segmentationLUT,
stack2.segmentationLUTO,
true);
uniformsLayer1.uLut.value = 2;
uniformsLayer1.uTextureLUT.value = lutLayer1.texture;
buildGUI(stackHelper);
}
loader.load(files)
.then(function() {
handleSeries();
})
.catch(function(error) {
window.console.log(error);
});
Can you please open a new issue with the question?
How did you generate the mask?
Thanks!
Description
I am using https://fnndsc.github.io/ami/#viewers_quadview as an example to load an NRRD label. What I did is import the NRRD loader
<script type="text/javascript" src="./dist/three.js/examples/js/loaders/NRRDLoader.js"></script>
first, and then make const nrrdLoader = new THREE.NRRDLoader();
instead of const stlLoader = new THREE.STLLoader();
But I got:
THREE.Volume is not a constructor
at THREE.NRRDLoader.parse (NRRDLoader.js:312) at NRRDLoader.js:20