Canvas JavaScript

Multithreaded Subsampled Raytracing

This JavaScript program demonstrates how to use several offscreen canvases to perform multithreaded raytraced rendering via web workers on a canvas element.

In the example, the left canvas element renders a 600x600 pixel image of a raytraced scene using 16 subsamples per pixel. The right canvas performs the exact same rendering using 16 smaller offscreen canvas elements in a 4x4 array. The speed of the rendering for each canvas is displayed beneath the canvas after each new frame is rendered.

To render on a canvas, click it to give it focus and then press the arrow keys to change the viewing direction and rerender the scene to the canvas. To get an accurate timing for a single frame, press a direction key just once. No attempt has been made to ensure that the 4x4 array of multithreaded canvas elements render at the same time. So, you will notice a flicker, which is more pronounced when a direction key is held down and repeated renderings occur. This could be mitigated by rendering all of the smaller canvas bitmaps to a larger offscreen canvas before rendering. However, I have deliberately not done this to make the smaller canvases more apparent.

MultithreadedSubsampledRaytrace.html

<!DOCTYPE html>
<html lang="en">
  <head>
  	<!-- The charset declaration must be inside <head> (it was previously above it) -->
  	<meta charset="utf-8" />
  	<title>XoaX.net's Javascript</title>
  	<link rel="icon" href="data:,">
  	<script src="Main.js"></script>
    <style>
    	.cFocus { border: 1px red solid; }
    	.cBlur { border: none; }
    </style>
  </head>
  <body onload="Initialize()">
  	<!-- Left: single-threaded rendering -->
  	<div style="float:left;">
  		<canvas width="600" height="600" id="idCanvas0" style="background-color: #F0F0F0;"></canvas><br />
  		<label for="idTime0">Rendering Time: <input id="idTime0" type="text" size="10" /> milliseconds</label>
  	</div>
  	<!-- Right: multithreaded rendering via a 4x4 grid of workers -->
  	<div style="float:right;">
  		<canvas width="600" height="600" id="idCanvas1" style="background-color: #F0F0F0;"></canvas><br />
  		<label for="idTime1">Rendering Time: <input id="idTime1" type="text" size="10" /> milliseconds</label>
  	</div>
  </body>
</html>

Main.js

// Per-canvas state, indexed 0 (single-threaded) and 1 (multithreaded 4x4)
var gqaCPs = new Array(2);        // CCanvasPlane objects
var gqaCanvases = new Array(2);   // canvas elements
var gqaContexts = new Array(2);   // 2D rendering contexts
var gdStartTimes = new Array(2);  // frame start times from performance.now()
var gqaTimeInputs = new Array(2); // text inputs that display the frame times
var giaThreadCounts = [0,0];      // finished-tile counts for the current frame

// The canvas plane currently controlled by the arrow keys
var gpCurrCP = null;
// NOTE(review): the per-canvas variables below appear unused; Initialize()
// works through the arrays above. Kept so any external references still work.
var gqCP1 = null;
var gqCP2 = null;
var gqCanvas1 = null;
var gqCanvas2 = null;
var gqContext1 = null;
var gqContext2 = null;

class CCanvasPlane {
	// A raytraced view plane backed by canvas gqaCanvases[iIndex], rendered by
	// a grid of iWorkersPerDim x iWorkersPerDim web workers (one per tile).
	// dW and dH give the canvas size in space coordinates. The pixel grid is
	// centered at the origin: for a 600x600 canvas, pixel (299.5, 299.5) sits
	// at the origin, with pixel indices running from 0 to 599.
	constructor(iIndex, dW, dH, iWorkersPerDim)  {
		this.miIndex = iIndex;
		this.miWorkers = iWorkersPerDim*iWorkersPerDim;
		var qCanvas = gqaCanvases[iIndex];
		this.mqContext = gqaContexts[iIndex];
		this.miPixelW = qCanvas.width;
		this.miPixelH = qCanvas.height;
		this.mdW = dW;
		this.mdH = dH;
		// Default viewing angles: alpha rotates about z, beta tilts toward z
		this.mqAlpha = Math.PI/6;
		this.mqBeta = -Math.PI/6;
		// Create the 2D array of workers
		this.mqWorkers = new Array(iWorkersPerDim);
		for (var i = 0; i < iWorkersPerDim; ++i) {
			this.mqWorkers[i] = new Array(iWorkersPerDim);
			for (var j = 0; j < iWorkersPerDim; ++j) {
				var qWorker = new Worker("Worker.js");
				// When a worker finishes, draw its tile bitmap into the target
				// canvas at the tile's pixel offset.
				qWorker.onmessage = function(qEvent) {
					let iLocIndex = qEvent.data.miIndex;
					let qBitmap = qEvent.data.mqBitmap;
					gqaContexts[iLocIndex].drawImage(qBitmap, qEvent.data.miPixelOffsetX, qEvent.data.miPixelOffsetY);
					// Count finished tiles for this canvas
					++giaThreadCounts[iLocIndex];
					// When every tile has reported, record the frame time
					if (giaThreadCounts[iLocIndex] == qEvent.data.miWorkers) {
						gqaTimeInputs[iLocIndex].value = performance.now() - gdStartTimes[iLocIndex];
						// Reset the tile count for the next frame
						giaThreadCounts[iLocIndex] = 0;
					}
				}
				this.mqWorkers[i][j] = qWorker;
			}
		}
	}
	// Rotate the view left (counterclockwise about z) by 15 degrees.
	Left() {
		this.mqAlpha +=  Math.PI/12;
	}
	// Rotate the view right (clockwise about z) by 15 degrees.
	Right() {
		this.mqAlpha -=  Math.PI/12;
	}
	// Tilt up by 15 degrees, but keep beta strictly below horizontal.
	Up() {
		if (this.mqBeta + Math.PI/12 < -0.01) {
			this.mqBeta +=  Math.PI/12;
		}
	}
	// Tilt down by 15 degrees, but stop short of looking straight down (-PI/2).
	Down() {
		if (this.mqBeta - Math.PI/12 > -Math.PI/2 + .01) {
			this.mqBeta -=  Math.PI/12;
		}
	}
	// Build the frame of the canvas in space coordinates:
	//   [0] the canvas x direction, lying in the plane z = 0,
	//   [1] the canvas y direction,
	//   [2] the viewing direction straight out of the canvas (cross product).
	CreateCoordinateVectors() {
		var daaA = [];
		daaA[0] = [Math.cos(this.mqAlpha), Math.sin(this.mqAlpha), 0];
		// cos(beta)*up + sin(beta)*(vector perpendicular to x pointing forward),
		// since beta is the angle between the canvas and the z-axis
		daaA[1] = [-daaA[0][1]*Math.sin(this.mqBeta), daaA[0][0]*Math.sin(this.mqBeta), Math.cos(this.mqBeta)];
		daaA[2] = [daaA[0][1]*daaA[1][2] - daaA[0][2]*daaA[1][1],
			daaA[0][2]*daaA[1][0] - daaA[0][0]*daaA[1][2],
			daaA[0][0]*daaA[1][1] - daaA[0][1]*daaA[1][0]];
		return daaA;
	}

	// Partition the canvas into tiles and post one render job per worker.
	DrawScene() {
		// The pixel width and height in the coordinates of the space
		var dPixWidth = this.mdW/this.miPixelW;
		var dPixHeight = this.mdH/this.miPixelH;
		var qCanvasInSpace = this.CreateCoordinateVectors();
		var daDirX = qCanvasInSpace[0];
		// Negate y so pixel rows advance downward on the canvas
		var daDirY = [-qCanvasInSpace[1][0], -qCanvasInSpace[1][1], -qCanvasInSpace[1][2]];
		var daView = qCanvasInSpace[2];
		var dPixStartX = dPixWidth*.5 - (this.mdW/2);
		var dPixStartY = dPixHeight*.5 - (this.mdH/2);
		// The position in space of the center of the first pixel
		var daPixPosInit = [dPixStartX*daDirX[0]+dPixStartY*daDirY[0], 
			dPixStartX*daDirX[1]+dPixStartY*daDirY[1], 
			dPixStartX*daDirX[2]+dPixStartY*daDirY[2]];
		var daInitial = new Array(3);
		var iWorkersPerDim = this.mqWorkers.length;
		// Tile size in pixels
		var iWidth = this.miPixelW/iWorkersPerDim;
		var iHeight = this.miPixelH/iWorkersPerDim;
		// Tile size in space coordinates
		var dWidthPerBitmap = iWidth*dPixWidth;
		var dHeightPerBitmap = iHeight*dPixHeight;
		gdStartTimes[this.miIndex] = performance.now();
		for (var i = 0; i < this.mqWorkers.length; ++i) {
			for (var j = 0; j < this.mqWorkers[i].length; ++j) {
				// Position in space of the first pixel of tile (i, j)
				daInitial[0] = daPixPosInit[0] + i*dWidthPerBitmap*daDirX[0] + j*dHeightPerBitmap*daDirY[0];
				daInitial[1] = daPixPosInit[1] + i*dWidthPerBitmap*daDirX[1] + j*dHeightPerBitmap*daDirY[1];
				daInitial[2] = daPixPosInit[2] + i*dWidthPerBitmap*daDirX[2] + j*dHeightPerBitmap*daDirY[2];
				this.mqWorkers[i][j].postMessage({
					miIndex:this.miIndex,
					miWorkers:this.miWorkers,
					mdaInitPos:daInitial,
					mdaDirX:daDirX,
					mdaDirY:daDirY,
					mdaViewDir:daView,
					mdPixelW:dPixWidth,
					mdPixelH:dPixHeight,
					miWidth:iWidth,
					miHeight:iHeight,
					// This is used to place the image in the canvas when it returns
					miPixelOffsetX:i*iWidth,
					miPixelOffsetY:j*iHeight
				});
			}
		}
	}
}

// Set up the canvases, 2D contexts, canvas planes, and input handlers.
// Called from the page's onload.
function Initialize() {
	gqaTimeInputs[0] = document.getElementById("idTime0");
	gqaTimeInputs[1] = document.getElementById("idTime1");
	gqaCanvases[0] = document.getElementById("idCanvas0");
	gqaCanvases[1] = document.getElementById("idCanvas1");
	gqaCanvases[0].tabIndex = 0;
	gqaCanvases[1].tabIndex = 1;
	gqaContexts[0] = gqaCanvases[0].getContext("2d");
	gqaContexts[1] = gqaCanvases[1].getContext("2d");
	// Left canvas: one worker; right canvas: a 4x4 grid of workers.
	gqaCPs[0] = new CCanvasPlane(0, 4.0, 4.0, 1);
	gqaCPs[1] = new CCanvasPlane(1, 4.0, 4.0, 4);
	gqaCPs[0].DrawScene();
	gqaCPs[1].DrawScene();

	// Focus/blur toggle the highlight border via the CSS classes.
	var SetFocusHandlers = function(qCanvas) {
		qCanvas.onfocus = function(e) {
			qCanvas.className = 'cFocus';
		};
		qCanvas.onblur = function(e) {
			qCanvas.className = 'cBlur';
		};
	};
	SetFocusHandlers(gqaCanvases[0]);
	SetFocusHandlers(gqaCanvases[1]);
	// Clicking a canvas makes its plane the target of the arrow keys.
	gqaCanvases[0].onclick = function(e) {
		gqaCanvases[0].focus();
		gqaCanvases[1].blur();
		gpCurrCP = gqaCPs[0];
		console.log("Click0");
	};
	gqaCanvases[1].onclick = function(e) {
		gqaCanvases[0].blur();
		gqaCanvases[1].focus();
		gpCurrCP = gqaCPs[1];
		console.log("Click1");
	};

	// The multithreaded canvas is the initial key target.
	gpCurrCP = gqaCPs[1];
	window.onkeydown=KeyDownFunction;
	window.focus();
}

// Handle arrow-key presses: rotate/tilt the currently selected canvas plane
// (gpCurrCP) and rerender it. Non-arrow keys are ignored entirely — the
// original version rerendered the full raytraced scene on EVERY key press,
// including unhandled keys, which wasted a complete frame of work.
function KeyDownFunction(e) {
	var iKeyUp = 38;
	var iKeyLeft = 37;
	var iKeyRight = 39;
	var iKeyDown = 40;
	// Support both the event argument and the legacy window.event path
	var iKeyCode = e ? e.which : window.event.keyCode;
	var bHandled = true;
	switch (iKeyCode) {
		case iKeyUp:
			gpCurrCP.Up();
			break;
		case iKeyLeft:
			gpCurrCP.Left();
			break;
		case iKeyRight:
			gpCurrCP.Right();
			break;
		case iKeyDown:
			gpCurrCP.Down();
			break;
		default:
			bHandled = false;
			break;
	}
	if (bHandled) {
		// Prevent the arrow keys from scrolling the window. Guard on e so the
		// legacy window.event path cannot throw here.
		if (e) {
			e.preventDefault();
		}
		// Rerender only when the view actually changed
		gpCurrCP.DrawScene();
	}
}

Worker.js

// To find the position of a pixel in space: mdaInitPos, mdaDirX, mdaDirY, mdaViewDir, mdPixelW, mdPixelH
// For the loop count over the pixels: miWidth, miHeight
/*
{
	mdaInitPos
	mdaDirX
	mdaDirY
	mdaViewDir
	mdPixelW
	mdPixelH
	miWidth
	miHeight
	// This is used to place the image in the canvas when it returns
	miPixelOffsetX
	miPixelOffsetY
}
*/
// Per-worker tile state, created on the first message and reused afterwards.
var gqOffscreenCanvas = null; // OffscreenCanvas that holds this worker's rendered tile
var gqContext2D = null; // 2D context of the offscreen canvas
var gqImageData = null; // RGBA pixel buffer that Draw() fills
var giPixelOffsetX = 0; // tile's x offset (pixels) in the destination canvas
var giPixelOffsetY = 0; // tile's y offset (pixels) in the destination canvas
var giIndex = 0; // index of the destination canvas (0 or 1)
var giWorkers = 0; // total worker count, echoed back for the frame tally

// Handle a render request from the main thread. The first message fixes this
// worker's tile: the offscreen canvas, its 2D context, the image data buffer,
// the pixel offset, and the owning canvas index are created once and reused
// for every subsequent frame.
onmessage = function (qEvent) {
	var qSpec = qEvent.data;
	if (gqOffscreenCanvas == null) {
		// First message: cache the per-worker tile state
		giIndex = qSpec.miIndex;
		gqOffscreenCanvas = new OffscreenCanvas(qSpec.miWidth, qSpec.miHeight);
		gqContext2D = gqOffscreenCanvas.getContext("2d");
		gqImageData = gqContext2D.createImageData(qSpec.miWidth, qSpec.miHeight);
		giPixelOffsetX = qSpec.miPixelOffsetX;
		giPixelOffsetY = qSpec.miPixelOffsetY;
	}
	giWorkers = qSpec.miWorkers;
	// Raytrace the tile into gqImageData, then blit it to the offscreen canvas
	Draw(qSpec);
	gqContext2D.putImageData(gqImageData, 0, 0);
	// Hand the rendered tile back to the main thread as a bitmap
	var qBitmap = gqOffscreenCanvas.transferToImageBitmap();
	postMessage({miIndex:giIndex, miWorkers:giWorkers, mqBitmap:qBitmap, miPixelOffsetX:giPixelOffsetX, miPixelOffsetY:giPixelOffsetY});
}

class CSphere {
	// A sphere with center daC (length-3 array) and radius dR.
	constructor(daC, dR) {
		this.mdaC = [daC[0], daC[1], daC[2]];
		this.mdR = dR;
	}

	// Intersect the ray p + t*v with the sphere.
	//   daP: ray origin, daV: ray direction,
	//   daTanDir: output parameter receiving the unit surface normal at the hit.
	// Returns the ray parameter t of the first intersection, or NaN on a miss.
	Intersect(daP, daV, daTanDir) {
		// Substituting the ray into (x - cx)^2 + (y - cy)^2 + (z - cz)^2 = r^2
		// and collecting powers of t gives A*t^2 + B*t + C = 0 with
		//   A = |v|^2,  B = 2*v.(p - c),  C = |p - c|^2 - r^2.
		var dT = NaN;
		var dDx = daP[0] - this.mdaC[0];
		var dDy = daP[1] - this.mdaC[1];
		var dDz = daP[2] - this.mdaC[2];
		var dC = dDx*dDx + dDy*dDy + dDz*dDz - this.mdR*this.mdR;
		var dB = 2.0*(daV[0]*dDx + daV[1]*dDy + daV[2]*dDz);
		var dA = daV[0]*daV[0] + daV[1]*daV[1] + daV[2]*daV[2];
		var dDisc = dB*dB - 4.0*dA*dC;
		if (dDisc > 0) {
			// Of the two roots, (-B - sqrt(disc))/2A is the smaller, which gives
			// the first intersection along the ray.
			dT = (-dB - Math.sqrt(dDisc))/(2.0*dA);
			// The surface normal is the gradient [2(x - cx), 2(y - cy), 2(z - cz)]
			// at the hit point x = p + t*v; the factor of 2 cancels under
			// normalization, so compute the hit-point offsets once and normalize.
			var dNx = daP[0] + dT*daV[0] - this.mdaC[0];
			var dNy = daP[1] + dT*daV[1] - this.mdaC[1];
			var dNz = daP[2] + dT*daV[2] - this.mdaC[2];
			var dMag = Math.sqrt(dNx*dNx + dNy*dNy + dNz*dNz);
			daTanDir[0] = dNx/dMag;
			daTanDir[1] = dNy/dMag;
			daTanDir[2] = dNz/dMag;
		}
		return dT;
	}
}

// Raytrace one tile into the global gqImageData buffer, 16 subsamples/pixel.
// qRayTraceSpec supplies the tile geometry:
//   mdaInitPos          - position in space of the center of the tile's first pixel
//   mdaDirX, mdaDirY    - pixel step directions in space
//   mdaViewDir          - viewing direction (the same for every ray)
//   mdPixelW, mdPixelH  - pixel size in space coordinates
//   miWidth, miHeight   - tile size in pixels
// FIX: the original loop bounds were swapped — the outer loop counted miWidth
// while stepping in y, and the inner counted miHeight while stepping in x.
// That only worked because the tiles are square; now rows loop over miHeight
// and columns over miWidth, which is correct for any tile shape.
function Draw(qRayTraceSpec) {
	var daPixPosInit = qRayTraceSpec.mdaInitPos;
	var daDirX = qRayTraceSpec.mdaDirX;
	var daDirY = qRayTraceSpec.mdaDirY;
	var daView = qRayTraceSpec.mdaViewDir;
	var dPixWidth = qRayTraceSpec.mdPixelW;
	var dPixHeight = qRayTraceSpec.mdPixelH;
	var iWidth = qRayTraceSpec.miWidth;
	var iHeight = qRayTraceSpec.miHeight;
	// The position of the center of the first pixel in the current row.
	var daPixPosRow = [daPixPosInit[0], daPixPosInit[1], daPixPosInit[2]];
	// The center point of the current pixel.
	var daPixPos = [daPixPosInit[0], daPixPosInit[1], daPixPosInit[2]];
	// The translation vectors for one pixel in the x- and y-directions
	var daPixDx = [dPixWidth*daDirX[0], dPixWidth*daDirX[1], dPixWidth*daDirX[2]];
	var daPixDy = [dPixHeight*daDirY[0], dPixHeight*daDirY[1], dPixHeight*daDirY[2]];
	// Quarter-pixel subsample steps are loop-invariant; compute them once
	// instead of once per pixel as before.
	var daSubDx = [daPixDx[0]/4.0, daPixDx[1]/4.0, daPixDx[2]/4.0];
	var daSubDy = [daPixDy[0]/4.0, daPixDy[1]/4.0, daPixDy[2]/4.0];
	// The current byte index into the RGBA image data
	var iPix = 0;
	var daTanDir = [0.0, 0.0, 0.0];
	// The scene: a unit sphere at the origin above a checkerboard floor z = -1
	var qUnitSphere = new CSphere([0.0, 0.0, 0.0], 1.0);
	// A directional light source (unit vector pointing from the light into the scene)
	var daLightDir = [-1.0/Math.sqrt(14.0), -2.0/Math.sqrt(14.0), -3.0/Math.sqrt(14.0)];
	for (var iRow = 0; iRow < iHeight; ++iRow) {
		for (var iCol = 0; iCol < iWidth; ++iCol) {
			// Start the 4x4 subsample grid centered on the pixel
			var daSubPosRow = [daPixPos[0] - 1.5*(daSubDx[0] + daSubDy[0]),
				daPixPos[1] - 1.5*(daSubDx[1] + daSubDy[1]),
				daPixPos[2] - 1.5*(daSubDx[2] + daSubDy[2])];
			var daSubPos = [daSubPosRow[0], daSubPosRow[1], daSubPosRow[2]];
			var dR = 0.0;
			var dG = 0.0;
			var dB = 0.0;
			// Pixel subsampling 4x4 = 16 samples per pixel.
			for (var m = 0; m < 4; ++m) {
				for (var n = 0; n < 4; ++n) {
					var dT = qUnitSphere.Intersect(daSubPos, daView, daTanDir);
					if (Number.isNaN(dT)) { // Missed the sphere: render the floor z = -1.0
						if (daView[2] < 0.0) {
							// pz + t*vz = -1.0  -->  t = -(pz + 1.0)/vz
							var dFloorT = -(daSubPos[2] + 1.0)/daView[2];
							var daGround = [daSubPos[0] + dFloorT*daView[0], daSubPos[1] + dFloorT*daView[1], daSubPos[2] + dFloorT*daView[2]];
							// Checkerboard cell from the floor of the ground coordinates
							var iFloorX = Math.floor(daGround[0]);
							var iFloorY = Math.floor(daGround[1]);
							// Shadow test: any intersection of the light line through the
							// ground point with the sphere (the sign of t does not matter;
							// only t^2 is used below) means the point is shadowed.
							var daToLight = [daLightDir[0], daLightDir[1], daLightDir[2]];
							var daIgnored = [0.0,0.0,0.0];
							var dShadowT = qUnitSphere.Intersect(daGround, daToLight, daIgnored);
							if (Number.isNaN(dShadowT)) {
								// Unshadowed checkerboard: gray and dark red squares
								if (((iFloorX + iFloorY) % 2) == 0) {
									dR += 150;
									dG += 150;
									dB += 150;
								} else {
									dR += 128;
									dG += 0;
									dB += 0;
								}
							} else { // Shadow: darken, fading out with distance
								var dReduce = (1.0 - Math.exp(-dShadowT*dShadowT/16.0));
								if (((iFloorX + iFloorY) % 2) == 0) {
									dR += 150*dReduce;
									dG += 150*dReduce;
									dB += 150*dReduce;
								} else {
									dR += 128*dReduce;
									dG += 0*dReduce;
									dB += 0*dReduce;
								}
							}
						} else {
							// Looking upward: flat dark red background
							dR += 128;
							dG += 0;
							dB += 0;
						}
					} else { // Hit the sphere: diffuse (Lambertian) shading
						var dDot = daTanDir[0]*daLightDir[0] + daTanDir[1]*daLightDir[1] + daTanDir[2]*daLightDir[2];
						dDot = (dDot < 0.0) ? -dDot: 0.0;
						dR += 0;
						dG += 128*dDot;
						dB += 128*dDot;
					}
					// Advance one subsample in x
					daSubPos[0] += daSubDx[0];
					daSubPos[1] += daSubDx[1];
					daSubPos[2] += daSubDx[2];
				}
				// Advance one subsample row in y and reset x
				daSubPosRow[0] += daSubDy[0];
				daSubPosRow[1] += daSubDy[1];
				daSubPosRow[2] += daSubDy[2];
				daSubPos[0] = daSubPosRow[0];
				daSubPos[1] = daSubPosRow[1];
				daSubPos[2] = daSubPosRow[2];
			}
			// The sum is over 16 samples. So, divide by the sample size.
			gqImageData.data[iPix] = dR/16.0;
			gqImageData.data[iPix+1] = dG/16.0;
			gqImageData.data[iPix+2] = dB/16.0;
			gqImageData.data[iPix+3] = 255;
			// Advance one pixel in x
			daPixPos[0] += daPixDx[0];
			daPixPos[1] += daPixDx[1];
			daPixPos[2] += daPixDx[2];
			iPix += 4;
		}
		// Advance one pixel row in y and reset x
		daPixPosRow[0] += daPixDy[0];
		daPixPosRow[1] += daPixDy[1];
		daPixPosRow[2] += daPixDy[2];
		daPixPos[0] = daPixPosRow[0];
		daPixPos[1] = daPixPosRow[1];
		daPixPos[2] = daPixPosRow[2];
	}
}
 

Output

 
 

© 2007–2025 XoaX.net LLC. All rights reserved.