forked from LeenkxTeam/LNXSDK
Update Files
3  Kha/Backends/HTML5/kha/Blob.hx  Normal file
@@ -0,0 +1,3 @@
package kha;

typedef Blob = kha.internal.BytesBlob;
234  Kha/Backends/HTML5/kha/CanvasImage.hx  Normal file
@@ -0,0 +1,234 @@
package kha;

import haxe.io.Bytes;
import js.Browser;
import js.lib.Uint8Array;
import js.html.VideoElement;
import js.html.webgl.GL;
import kha.graphics4.TextureFormat;
import kha.js.CanvasGraphics;

class CanvasImage extends Image {
	public var image: Dynamic;
	public var video: VideoElement;

	static var context: Dynamic;

	var data: Dynamic;

	var myWidth: Int;
	var myHeight: Int;
	var myFormat: TextureFormat;
	var renderTarget: Bool;

	public var frameBuffer: Dynamic;

	var graphics1: kha.graphics1.Graphics;
	var g2canvas: CanvasGraphics = null;

	public static function init() {
		var canvas: Dynamic = Browser.document.createElement("canvas");
		if (canvas != null) {
			context = canvas.getContext("2d");
			canvas.width = 2048;
			canvas.height = 2048;
			context.globalCompositeOperation = "copy";
		}
	}

	public function new(width: Int, height: Int, format: TextureFormat, renderTarget: Bool) {
		myWidth = width;
		myHeight = height;
		myFormat = format;
		this.renderTarget = renderTarget;
		image = null;
		video = null;
		if (renderTarget)
			createTexture();
	}

	override function get_g1(): kha.graphics1.Graphics {
		if (graphics1 == null) {
			graphics1 = new kha.graphics2.Graphics1(this);
		}
		return graphics1;
	}

	override function get_g2(): kha.graphics2.Graphics {
		if (g2canvas == null) {
			var canvas: Dynamic = Browser.document.createElement("canvas");
			image = canvas;
			var context = canvas.getContext("2d");
			canvas.width = width;
			canvas.height = height;
			g2canvas = new CanvasGraphics(context);
		}
		return g2canvas;
	}

	override function get_g4(): kha.graphics4.Graphics {
		return null;
	}

	override function get_width(): Int {
		return myWidth;
	}

	override function get_height(): Int {
		return myHeight;
	}

	override function get_format(): TextureFormat {
		return myFormat;
	}

	override function get_realWidth(): Int {
		return myWidth;
	}

	override function get_realHeight(): Int {
		return myHeight;
	}

	override function get_stride(): Int {
		return myFormat == TextureFormat.RGBA32 ? 4 * width : width;
	}

	override public function isOpaque(x: Int, y: Int): Bool {
		if (data == null) {
			if (context == null)
				return true;
			else
				createImageData();
		}
		return (data.data[y * Std.int(image.width) * 4 + x * 4 + 3] != 0);
	}

	override public function at(x: Int, y: Int): Color {
		if (data == null) {
			if (context == null)
				return Color.Black;
			else
				createImageData();
		}

		var r = data.data[y * Std.int(image.width) * 4 + x * 4];
		var g = data.data[y * Std.int(image.width) * 4 + x * 4 + 1];
		var b = data.data[y * Std.int(image.width) * 4 + x * 4 + 2];
		var a = data.data[y * Std.int(image.width) * 4 + x * 4 + 3];

		return Color.fromValue((a << 24) | (r << 16) | (g << 8) | b);
	}

	function createImageData() {
		context.strokeStyle = "rgba(0,0,0,0)";
		context.fillStyle = "rgba(0,0,0,0)";
		context.fillRect(0, 0, image.width, image.height);
		context.drawImage(image, 0, 0, image.width, image.height, 0, 0, image.width, image.height);
		data = context.getImageData(0, 0, image.width, image.height);
	}

	var texture: Dynamic;

	static function upperPowerOfTwo(v: Int): Int {
		v--;
		v |= v >>> 1;
		v |= v >>> 2;
		v |= v >>> 4;
		v |= v >>> 8;
		v |= v >>> 16;
		v++;
		return v;
	}

	public function createTexture() {
		if (SystemImpl.gl == null)
			return;
		texture = SystemImpl.gl.createTexture();
		// texture.image = image;
		SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
		// Sys.gl.pixelStorei(Sys.gl.UNPACK_FLIP_Y_WEBGL, true);

		SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
		SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
		SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
		SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
		if (renderTarget) {
			frameBuffer = SystemImpl.gl.createFramebuffer();
			SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
			SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, realWidth, realHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
			SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_2D, texture, 0);
			SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
		}
		else if (video != null)
			SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, video);
		else
			SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, image);
		// Sys.gl.generateMipmap(Sys.gl.TEXTURE_2D);
		SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
	}

	public function set(stage: Int): Void {
		SystemImpl.gl.activeTexture(GL.TEXTURE0 + stage);
		SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
		if (video != null)
			SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, video);
	}

	public var bytes: Bytes;

	override public function lock(level: Int = 0): Bytes {
		bytes = Bytes.alloc(myFormat == TextureFormat.RGBA32 ? 4 * width * height : width * height);
		return bytes;
	}

	override public function unlock(): Void {
		data = null;

		if (SystemImpl.gl != null) {
			texture = SystemImpl.gl.createTexture();
			// texture.image = image;
			SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
			// Sys.gl.pixelStorei(Sys.gl.UNPACK_FLIP_Y_WEBGL, true);

			SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
			SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
			SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
			SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
			SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.LUMINANCE, width, height, 0, GL.LUMINANCE, GL.UNSIGNED_BYTE, new Uint8Array(bytes.getData()));

			if (SystemImpl.ie && SystemImpl.gl.getError() == 1282) { // no LUMINANCE support in IE11
				var rgbaBytes = Bytes.alloc(width * height * 4);
				for (y in 0...height)
					for (x in 0...width) {
						var value = bytes.get(y * width + x);
						rgbaBytes.set(y * width * 4 + x * 4 + 0, value);
						rgbaBytes.set(y * width * 4 + x * 4 + 1, value);
						rgbaBytes.set(y * width * 4 + x * 4 + 2, value);
						rgbaBytes.set(y * width * 4 + x * 4 + 3, 255);
					}
				SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, width, height, 0, GL.RGBA, GL.UNSIGNED_BYTE, new Uint8Array(rgbaBytes.getData()));
			}

			// Sys.gl.generateMipmap(Sys.gl.TEXTURE_2D);
			SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
			bytes = null;
		}
	}

	override public function getPixels(): Bytes {
		@:privateAccess var context: js.html.CanvasRenderingContext2D = g2canvas.canvas;
		var imageData: js.html.ImageData = context.getImageData(0, 0, width, height);
		var bytes = Bytes.alloc(imageData.data.length);
		for (i in 0...imageData.data.length) {
			bytes.set(i, imageData.data[i]);
		}
		return bytes;
	}

	override public function unload(): Void {
		image = null;
		video = null;
		data = null;
	}
}
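Usage sketch (not part of the commit): on browsers without WebGL, Image.create in this backend (see Image.hx below) returns a CanvasImage, so 2D drawing falls back to the canvas path shown above. The calls below are purely illustrative:

	// Illustrative fallback path; Image.create picks CanvasImage when SystemImpl.gl is null.
	var img = kha.Image.create(256, 256);
	img.g2.begin();                // CanvasGraphics is created lazily by get_g2
	img.g2.clear(kha.Color.Black);
	img.g2.end();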
86  Kha/Backends/HTML5/kha/Display.hx  Normal file
@@ -0,0 +1,86 @@
package kha;

import js.Browser;

class Display {
	static var instance: Display = new Display();

	function new() {}

	public static function init(): Void {}

	public static var primary(get, never): Display;

	static function get_primary(): Display {
		return instance;
	}

	public static var all(get, never): Array<Display>;

	static function get_all(): Array<Display> {
		return [primary];
	}

	public var available(get, never): Bool;

	function get_available(): Bool {
		return true;
	}

	public var name(get, never): String;

	function get_name(): String {
		return "Display";
	}

	public var x(get, never): Int;

	function get_x(): Int {
		return js.Browser.window.screen.left;
	}

	public var y(get, never): Int;

	function get_y(): Int {
		return js.Browser.window.screen.top;
	}

	public var width(get, never): Int;

	function get_width(): Int {
		return js.Browser.window.screen.width;
	}

	public var height(get, never): Int;

	function get_height(): Int {
		return js.Browser.window.screen.height;
	}

	public var frequency(get, never): Int;

	function get_frequency(): Int {
		return SystemImpl.estimatedRefreshRate;
	}

	public var pixelsPerInch(get, never): Int;

	function get_pixelsPerInch(): Int {
		var dpiElement = Browser.document.createElement("div");
		dpiElement.style.position = "absolute";
		dpiElement.style.width = "1in";
		dpiElement.style.height = "1in";
		dpiElement.style.left = "-100%";
		dpiElement.style.top = "-100%";
		Browser.document.body.appendChild(dpiElement);
		var dpi: Int = dpiElement.offsetHeight;
		dpiElement.remove();
		return dpi;
	}

	public var modes(get, never): Array<DisplayMode>;

	function get_modes(): Array<DisplayMode> {
		return [];
	}
}
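Usage sketch (illustrative only): the metrics above can be read through the static primary property once the system is running:

	// Illustrative query of the screen metrics exposed by this backend.
	var d = kha.Display.primary;
	trace(d.width + "x" + d.height + ", " + d.pixelsPerInch + " ppi, " + d.frequency + " Hz");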
24  Kha/Backends/HTML5/kha/EnvironmentVariables.hx  Normal file
@@ -0,0 +1,24 @@
package kha;

import js.Browser;

class EnvironmentVariables {
	public static function get(name: String): String {
		try {
			var query = Browser.location.href.substr(Browser.location.href.indexOf("?") + 1);
			var parts = query.split("&");

			for (part in parts) {
				var subparts = part.split("=");
				if (subparts[0] == name) {
					return subparts[1];
				}
			}

			return null;
		}
		catch (error:Dynamic) {
			return null;
		}
	}
}
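Usage sketch (hypothetical query string): the URL query doubles as the environment, so for a page opened as index.html?level=3 the lookup behaves like this:

	// Hypothetical parameter names; get() returns null when the name is absent.
	var level = kha.EnvironmentVariables.get("level"); // "3"
	var gpu = kha.EnvironmentVariables.get("gpu");     // null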
3  Kha/Backends/HTML5/kha/Font.hx  Normal file
@@ -0,0 +1,3 @@
package kha;

typedef Font = kha.Kravur;
221  Kha/Backends/HTML5/kha/Image.hx  Normal file
@@ -0,0 +1,221 @@
|
||||
package kha;
|
||||
|
||||
import haxe.io.Bytes;
|
||||
import js.html.ImageElement;
|
||||
import js.html.CanvasElement;
|
||||
import js.html.webgl.GL;
|
||||
import kha.graphics4.TextureFormat;
|
||||
import kha.graphics4.DepthStencilFormat;
|
||||
import kha.graphics4.Usage;
|
||||
|
||||
class Image implements Canvas implements Resource {
|
||||
public static function create(width: Int, height: Int, format: TextureFormat = null, usage: Usage = null, readable: Bool = false): Image {
|
||||
if (format == null)
|
||||
format = TextureFormat.RGBA32;
|
||||
if (usage == null)
|
||||
usage = Usage.StaticUsage;
|
||||
if (SystemImpl.gl == null)
|
||||
return new CanvasImage(width, height, format, false);
|
||||
else
|
||||
return new WebGLImage(width, height, format, false, DepthStencilFormat.NoDepthAndStencil, 1, readable);
|
||||
}
|
||||
|
||||
public static function create3D(width: Int, height: Int, depth: Int, format: TextureFormat = null, usage: Usage = null, readable: Bool = false): Image {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static function createRenderTarget(width: Int, height: Int, format: TextureFormat = null,
|
||||
depthStencil: DepthStencilFormat = DepthStencilFormat.NoDepthAndStencil, antiAliasingSamples: Int = 1): Image {
|
||||
if (format == null)
|
||||
format = TextureFormat.RGBA32;
|
||||
if (SystemImpl.gl == null)
|
||||
return new CanvasImage(width, height, format, true);
|
||||
else
|
||||
return new WebGLImage(width, height, format, true, depthStencil, antiAliasingSamples, false);
|
||||
}
|
||||
|
||||
public static function fromCanvas(canvas: CanvasElement): Image {
|
||||
if (SystemImpl.gl == null) {
|
||||
var img = new CanvasImage(canvas.width, canvas.height, TextureFormat.RGBA32, false);
|
||||
img.image = canvas;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
else {
|
||||
var img = new WebGLImage(canvas.width, canvas.height, TextureFormat.RGBA32, false, DepthStencilFormat.NoDepthAndStencil, 1, false);
|
||||
img.image = canvas;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
}
|
||||
|
||||
public static function fromImage(image: ImageElement, readable: Bool): Image {
|
||||
if (SystemImpl.gl == null) {
|
||||
var img = new CanvasImage(image.width, image.height, TextureFormat.RGBA32, false);
|
||||
img.image = image;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
else {
|
||||
var img = new WebGLImage(image.width, image.height, TextureFormat.RGBA32, false, DepthStencilFormat.NoDepthAndStencil, 1, readable);
|
||||
img.image = image;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
}
|
||||
|
||||
public static function fromBytes(bytes: Bytes, width: Int, height: Int, format: TextureFormat = null, usage: Usage = null, readable: Bool = false): Image {
|
||||
if (format == null)
|
||||
format = TextureFormat.RGBA32;
|
||||
if (usage == null)
|
||||
usage = Usage.StaticUsage;
|
||||
if (SystemImpl.gl != null) {
|
||||
var img = new WebGLImage(width, height, format, false, DepthStencilFormat.NoDepthAndStencil, 1, readable);
|
||||
img.image = img.bytesToArray(bytes);
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
var img = new CanvasImage(width, height, format, false);
|
||||
var g2: kha.js.CanvasGraphics = cast img.g2;
|
||||
@:privateAccess var canvas = g2.canvas;
|
||||
var imageData = new js.html.ImageData(new js.lib.Uint8ClampedArray(bytes.getData()), width, height);
|
||||
canvas.putImageData(imageData, 0, 0);
|
||||
return img;
|
||||
}
|
||||
|
||||
public static function fromBytes3D(bytes: Bytes, width: Int, height: Int, depth: Int, format: TextureFormat = null, usage: Usage = null,
|
||||
readable: Bool = false): Image {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static function fromEncodedBytes(bytes: Bytes, fileExtention: String, doneCallback: Image->Void, errorCallback: String->Void,
|
||||
readable: Bool = false): Void {
|
||||
var dataUrl = "data:image;base64," + haxe.crypto.Base64.encode(bytes);
|
||||
var imageElement = cast(js.Browser.document.createElement("img"), ImageElement);
|
||||
imageElement.onload = function() doneCallback(fromImage(imageElement, readable));
|
||||
imageElement.onerror = function() errorCallback("Image was not created");
|
||||
imageElement.src = dataUrl;
|
||||
}
|
||||
|
||||
public static function fromVideo(video: kha.Video): Image {
|
||||
final jsvideo: kha.js.Video = cast video;
|
||||
|
||||
if (SystemImpl.gl == null) {
|
||||
var img = new CanvasImage(jsvideo.element.videoWidth, jsvideo.element.videoHeight, TextureFormat.RGBA32, false);
|
||||
img.video = jsvideo.element;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
else {
|
||||
var img = new WebGLImage(jsvideo.element.videoWidth, jsvideo.element.videoHeight, TextureFormat.RGBA32, false,
|
||||
DepthStencilFormat.NoDepthAndStencil, 1, false);
|
||||
img.video = jsvideo.element;
|
||||
img.createTexture();
|
||||
return img;
|
||||
}
|
||||
}
|
||||
|
||||
public static var maxSize(get, never): Int;
|
||||
|
||||
static function get_maxSize(): Int {
|
||||
return SystemImpl.gl == null ? 1024 * 8 : SystemImpl.gl.getParameter(GL.MAX_TEXTURE_SIZE);
|
||||
}
|
||||
|
||||
public static var nonPow2Supported(get, never): Bool;
|
||||
|
||||
static function get_nonPow2Supported(): Bool {
|
||||
return SystemImpl.gl != null;
|
||||
}
|
||||
|
||||
public static function renderTargetsInvertedY(): Bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
public function isOpaque(x: Int, y: Int): Bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
public function at(x: Int, y: Int): Color {
|
||||
return Color.Black;
|
||||
}
|
||||
|
||||
public function unload(): Void {}
|
||||
|
||||
public function lock(level: Int = 0): Bytes {
|
||||
return null;
|
||||
}
|
||||
|
||||
public function unlock(): Void {}
|
||||
|
||||
public function getPixels(): Bytes {
|
||||
return null;
|
||||
}
|
||||
|
||||
public function generateMipmaps(levels: Int): Void {}
|
||||
|
||||
public function setMipmaps(mipmaps: Array<Image>): Void {}
|
||||
|
||||
public function setDepthStencilFrom(image: Image): Void {}
|
||||
|
||||
public function clear(x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, color: Color): Void {}
|
||||
|
||||
public var width(get, never): Int;
|
||||
|
||||
function get_width(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var height(get, never): Int;
|
||||
|
||||
function get_height(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var depth(get, never): Int;
|
||||
|
||||
function get_depth(): Int {
|
||||
return 1;
|
||||
}
|
||||
|
||||
public var format(get, never): TextureFormat;
|
||||
|
||||
function get_format(): TextureFormat {
|
||||
return TextureFormat.RGBA32;
|
||||
}
|
||||
|
||||
public var realWidth(get, never): Int;
|
||||
|
||||
function get_realWidth(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var realHeight(get, never): Int;
|
||||
|
||||
function get_realHeight(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var stride(get, never): Int;
|
||||
|
||||
function get_stride(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var g1(get, never): kha.graphics1.Graphics;
|
||||
|
||||
function get_g1(): kha.graphics1.Graphics {
|
||||
return null;
|
||||
}
|
||||
|
||||
public var g2(get, never): kha.graphics2.Graphics;
|
||||
|
||||
function get_g2(): kha.graphics2.Graphics {
|
||||
return null;
|
||||
}
|
||||
|
||||
public var g4(get, never): kha.graphics4.Graphics;
|
||||
|
||||
function get_g4(): kha.graphics4.Graphics {
|
||||
return null;
|
||||
}
|
||||
}
|
236  Kha/Backends/HTML5/kha/LoaderImpl.hx  Normal file
@@ -0,0 +1,236 @@
|
||||
package kha;
|
||||
|
||||
import js.html.FileReader;
|
||||
import js.Syntax;
|
||||
import js.Browser;
|
||||
import js.html.ImageElement;
|
||||
import js.html.XMLHttpRequest;
|
||||
import haxe.io.Bytes;
|
||||
import kha.Blob;
|
||||
import kha.js.WebAudioSound;
|
||||
import kha.js.MobileWebAudioSound;
|
||||
import kha.graphics4.TextureFormat;
|
||||
import kha.graphics4.Usage;
|
||||
|
||||
using StringTools;
|
||||
|
||||
class LoaderImpl {
|
||||
@:allow(kha.SystemImpl)
|
||||
static var dropFiles = new Map<String, js.html.File>();
|
||||
|
||||
public static function getImageFormats(): Array<String> {
|
||||
return ["png", "jpg", "hdr"];
|
||||
}
|
||||
|
||||
public static function loadImageFromDescription(desc: Dynamic, done: kha.Image->Void, failed: AssetError->Void) {
|
||||
var readable = Reflect.hasField(desc, "readable") ? desc.readable : false;
|
||||
if (StringTools.endsWith(desc.files[0], ".hdr")) {
|
||||
loadBlobFromDescription(desc, function(blob) {
|
||||
var hdrImage = kha.internal.HdrFormat.parse(blob.toBytes());
|
||||
done(Image.fromBytes(hdrImage.data.view.buffer, hdrImage.width, hdrImage.height, TextureFormat.RGBA128,
|
||||
readable ? Usage.DynamicUsage : Usage.StaticUsage));
|
||||
}, failed);
|
||||
}
|
||||
else {
|
||||
var img: ImageElement = cast Browser.document.createElement("img");
|
||||
img.onerror = function(event: Dynamic) failed({url: desc.files[0], error: event});
|
||||
img.onload = function(event: Dynamic) done(Image.fromImage(img, readable));
|
||||
img.crossOrigin = "";
|
||||
img.src = desc.files[0];
|
||||
}
|
||||
}
|
||||
|
||||
public static function getSoundFormats(): Array<String> {
|
||||
var element = Browser.document.createAudioElement();
|
||||
var formats = new Array<String>();
|
||||
#if !kha_debug_html5
|
||||
if (element.canPlayType("audio/mp4") != "")
|
||||
formats.push("mp4");
|
||||
if (element.canPlayType("audio/mp3") != "")
|
||||
formats.push("mp3");
|
||||
if (element.canPlayType("audio/wav") != "")
|
||||
formats.push("wav");
|
||||
#end
|
||||
if (SystemImpl._hasWebAudio || element.canPlayType("audio/ogg") != "")
|
||||
formats.push("ogg");
|
||||
return formats;
|
||||
}
|
||||
|
||||
public static function loadSoundFromDescription(desc: Dynamic, done: kha.Sound->Void, failed: AssetError->Void) {
|
||||
if (SystemImpl._hasWebAudio) {
|
||||
#if !kha_debug_html5
|
||||
var element = Browser.document.createAudioElement();
|
||||
if (element.canPlayType("audio/mp4") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".mp4")) {
|
||||
new WebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (element.canPlayType("audio/mp3") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".mp3")) {
|
||||
new WebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (element.canPlayType("audio/wav") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".wav")) {
|
||||
new WebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
#end
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".ogg")) {
|
||||
new WebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
failed({
|
||||
url: desc.files.join(","),
|
||||
error: "Unable to find sound files with supported audio formats",
|
||||
});
|
||||
}
|
||||
else if (SystemImpl.mobile) {
|
||||
var element = Browser.document.createAudioElement();
|
||||
if (element.canPlayType("audio/mp4") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".mp4")) {
|
||||
new MobileWebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (element.canPlayType("audio/mp3") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".mp3")) {
|
||||
new MobileWebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (element.canPlayType("audio/wav") != "") {
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".wav")) {
|
||||
new MobileWebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
for (i in 0...desc.files.length) {
|
||||
var file: String = desc.files[i];
|
||||
if (file.endsWith(".ogg")) {
|
||||
new MobileWebAudioSound(file, done, failed);
|
||||
return;
|
||||
}
|
||||
}
|
||||
failed({
|
||||
url: desc.files.join(","),
|
||||
error: "Unable to find sound files with supported audio formats",
|
||||
});
|
||||
}
|
||||
else {
|
||||
new kha.js.Sound(desc.files, done, failed);
|
||||
}
|
||||
}
|
||||
|
||||
public static function getVideoFormats(): Array<String> {
|
||||
#if kha_debug_html5
|
||||
return ["webm"];
|
||||
#else
|
||||
return ["mp4", "webm"];
|
||||
#end
|
||||
}
|
||||
|
||||
public static function loadVideoFromDescription(desc: Dynamic, done: kha.Video->Void, failed: AssetError->Void): Void {
|
||||
kha.js.Video.fromFile(desc.files, done);
|
||||
}
|
||||
|
||||
public static function loadRemote(desc: Dynamic, done: Blob->Void, failed: AssetError->Void) {
|
||||
var request = untyped new XMLHttpRequest();
|
||||
request.open("GET", desc.files[0], true);
|
||||
request.responseType = "arraybuffer";
|
||||
|
||||
request.onreadystatechange = function() {
|
||||
if (request.readyState != 4)
|
||||
return;
|
||||
if ((request.status >= 200 && request.status < 400)
|
||||
|| (request.status == 0 && request.statusText == "")) { // Blobs loaded using --allow-file-access-from-files
|
||||
var bytes: Bytes = null;
|
||||
var arrayBuffer = request.response;
|
||||
if (arrayBuffer != null) {
|
||||
var byteArray: Dynamic = Syntax.code("new Uint8Array(arrayBuffer)");
|
||||
bytes = Bytes.ofData(byteArray);
|
||||
}
|
||||
else if (request.responseBody != null) {
|
||||
var data: Dynamic = untyped Syntax.code("VBArray(request.responseBody).toArray()");
|
||||
bytes = Bytes.alloc(data.length);
|
||||
for (i in 0...data.length)
|
||||
bytes.set(i, data[i]);
|
||||
}
|
||||
else {
|
||||
failed({url: desc.files[0]});
|
||||
return;
|
||||
}
|
||||
|
||||
done(new Blob(bytes));
|
||||
}
|
||||
else {
|
||||
failed({url: desc.files[0]});
|
||||
}
|
||||
}
|
||||
request.send(null);
|
||||
}
|
||||
|
||||
public static function loadBlobFromDescription(desc: Dynamic, done: Blob->Void, failed: AssetError->Void) {
|
||||
#if kha_debug_html5
|
||||
var file: String = desc.files[0];
|
||||
|
||||
if (file.startsWith("http://") || file.startsWith("https://")) {
|
||||
loadRemote(desc, done, failed);
|
||||
}
|
||||
else if (file.startsWith("drop://")) {
|
||||
var dropFile = dropFiles.get(file.substring(7));
|
||||
if (dropFile == null)
|
||||
failed({url: file, error: 'file not found'});
|
||||
else {
|
||||
var reader = new FileReader();
|
||||
reader.onloadend = () -> {
|
||||
done(new Blob(Bytes.ofData(reader.result)));
|
||||
};
|
||||
reader.onerror = () -> failed({url: file, error: reader.error});
|
||||
reader.readAsArrayBuffer(dropFile);
|
||||
}
|
||||
}
|
||||
else {
|
||||
var loadBlob = Syntax.code("window.electron.loadBlob");
|
||||
loadBlob(desc, (byteArray: Dynamic) -> {
|
||||
var bytes = Bytes.alloc(byteArray.byteLength);
|
||||
for (i in 0...byteArray.byteLength)
|
||||
bytes.set(i, byteArray[i]);
|
||||
done(new Blob(bytes));
|
||||
}, failed);
|
||||
}
|
||||
#else
|
||||
loadRemote(desc, done, failed);
|
||||
#end
|
||||
}
|
||||
|
||||
public static function loadFontFromDescription(desc: Dynamic, done: Font->Void, failed: AssetError->Void): Void {
|
||||
loadBlobFromDescription(desc, function(blob: Blob) {
|
||||
done(new Font(blob));
|
||||
}, failed);
|
||||
}
|
||||
}
|
13  Kha/Backends/HTML5/kha/Macros.hx  Normal file
@@ -0,0 +1,13 @@
package kha;

import haxe.macro.Context;
import haxe.macro.Expr;

class Macros {
	public static macro function canvasId(): Expr {
		return {
			expr: EConst(CString(Context.getDefines().get("canvas_id"))),
			pos: Context.currentPos()
		};
	}
}
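Usage sketch (assumes the build sets the canvas_id define, e.g. -D canvas_id=khanvas): the macro inlines the define's value as a string constant at compile time, so it can be used to locate the target canvas:

	// Resolves at compile time to the value of the canvas_id define.
	var canvas = js.Browser.document.getElementById(kha.Macros.canvasId());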
102  Kha/Backends/HTML5/kha/Storage.hx  Normal file
@@ -0,0 +1,102 @@
package kha;

import haxe.io.Bytes;
import haxe.io.BytesBuffer;
import haxe.io.BytesData;
import js.Browser;

using StringTools;

class LocalStorageFile extends StorageFile {
	var name: String;

	public function new(name: String) {
		this.name = name;
	}

	override public function read(): Blob {
		var storage = Browser.getLocalStorage();
		if (storage == null)
			return null;
		var value: String = storage.getItem(System.title + "_" + name);
		if (value == null)
			return null;
		else
			return Blob.fromBytes(decode(value));
	}

	override public function write(data: Blob): Void {
		var storage = Browser.getLocalStorage();
		if (storage == null)
			return;
		storage.setItem(System.title + "_" + name, encode(data.bytes.getData()));
	}

	/**
	 * Encodes byte array to yEnc string (from SASStore).
	 * @param {Array} source Byte array to convert to yEnc.
	 * @return {string} Resulting yEnc string from byte array.
	 */
	static function encode(source: BytesData): String {
		var reserved = [0, 10, 13, 61];
		var output = "";
		var converted, ele;
		var bytes = new js.lib.Uint8Array(source);
		for (i in 0...bytes.length) {
			ele = bytes[i];
			converted = (ele + 42) % 256;
			if (!Lambda.has(reserved, converted)) {
				output += String.fromCharCode(converted);
			}
			else {
				converted = (converted + 64) % 256;
				output += "=" + String.fromCharCode(converted);
			}
		}
		return output;
	}

	/**
	 * Decodes yEnc string to byte array (from SASStore).
	 * @param {string} source yEnc string to decode to byte array.
	 * @return {Array} Resulting byte array from yEnc string.
	 */
	static function decode(source: String): Bytes {
		var output = new BytesBuffer();
		var ck = false;
		var c;
		for (i in 0...source.length) {
			c = source.fastCodeAt(i);
			// ignore newlines
			if (c == 13 || c == 10) {
				continue;
			}
			// if we're an "=" and we haven't been flagged, set flag
			if (c == 61 && !ck) {
				ck = true;
				continue;
			}
			if (ck) {
				ck = false;
				c = c - 64;
			}
			if (c < 42 && c > 0) {
				output.addByte(c + 214);
			}
			else {
				output.addByte(c - 42);
			}
		}
		return output.getBytes();
	}
}

class Storage {
	public static function namedFile(name: String): StorageFile {
		return new LocalStorageFile(name);
	}

	public static function defaultFile(): StorageFile {
		return namedFile("default.kha");
	}
}
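Usage sketch (illustrative round trip): writes land in localStorage under the key System.title + "_" + name, yEnc-encoded by the helpers above:

	// Illustrative round trip through the localStorage-backed store.
	var file = kha.Storage.defaultFile();
	file.write(kha.Blob.fromBytes(haxe.io.Bytes.ofString("saved")));
	var blob = file.read();           // null when localStorage is unavailable
	trace(blob.toBytes().toString()); // "saved"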
1346  Kha/Backends/HTML5/kha/SystemImpl.hx  Normal file
File diff suppressed because it is too large
584  Kha/Backends/HTML5/kha/WebGLImage.hx  Normal file
@@ -0,0 +1,584 @@
|
||||
package kha;
|
||||
|
||||
import haxe.io.Bytes;
|
||||
import js.Browser;
|
||||
import js.lib.Uint8Array;
|
||||
import js.lib.Uint16Array;
|
||||
import js.lib.Float32Array;
|
||||
import js.html.VideoElement;
|
||||
import js.html.webgl.GL;
|
||||
import js.html.webgl.Framebuffer;
|
||||
import js.html.webgl.Renderbuffer;
|
||||
import js.html.webgl.Texture;
|
||||
import kha.graphics4.TextureFormat;
|
||||
import kha.graphics4.DepthStencilFormat;
|
||||
import kha.js.graphics4.Graphics;
|
||||
|
||||
class WebGLImage extends Image {
|
||||
public var image: Dynamic;
|
||||
public var video: VideoElement;
|
||||
|
||||
static var context: js.html.CanvasRenderingContext2D;
|
||||
|
||||
var data: js.html.ImageData;
|
||||
|
||||
var myWidth: Int;
|
||||
var myHeight: Int;
|
||||
var myFormat: TextureFormat;
|
||||
var renderTarget: Bool;
|
||||
var samples: Int;
|
||||
|
||||
public var frameBuffer: Framebuffer = null;
|
||||
public var renderBuffer: Renderbuffer = null;
|
||||
public var texture: Texture = null;
|
||||
public var depthTexture: Texture = null;
|
||||
public var MSAAFrameBuffer: Framebuffer = null;
|
||||
|
||||
var MSAAColorBuffer: Renderbuffer;
|
||||
var MSAADepthBuffer: Renderbuffer;
|
||||
|
||||
var graphics1: kha.graphics1.Graphics;
|
||||
var graphics2: kha.graphics2.Graphics;
|
||||
var graphics4: kha.graphics4.Graphics;
|
||||
|
||||
var depthStencilFormat: DepthStencilFormat;
|
||||
|
||||
var readable: Bool;
|
||||
|
||||
// WebGL2 constants
|
||||
static inline var GL_RGBA16F = 0x881A;
|
||||
static inline var GL_RGBA32F = 0x8814;
|
||||
static inline var GL_R16F = 0x822D;
|
||||
static inline var GL_R32F = 0x822E;
|
||||
static inline var GL_RED = 0x1903;
|
||||
static inline var GL_DEPTH_COMPONENT24 = 0x81A6;
|
||||
static inline var GL_DEPTH24_STENCIL8 = 0x88F0;
|
||||
static inline var GL_DEPTH32F_STENCIL8 = 0x8CAD;
|
||||
|
||||
static var canvas: js.html.CanvasElement;
|
||||
|
||||
public static function init() {
|
||||
if (context == null) {
|
||||
// create only once
|
||||
canvas = Browser.document.createCanvasElement();
|
||||
if (canvas != null) {
|
||||
context = canvas.getContext("2d");
|
||||
canvas.width = 4096;
|
||||
canvas.height = 4096;
|
||||
context.globalCompositeOperation = "copy";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public function new(width: Int, height: Int, format: TextureFormat, renderTarget: Bool, depthStencilFormat: DepthStencilFormat, samples: Int,
|
||||
readable: Bool) {
|
||||
myWidth = width;
|
||||
myHeight = height;
|
||||
myFormat = format;
|
||||
this.renderTarget = renderTarget;
|
||||
this.samples = samples;
|
||||
this.readable = readable;
|
||||
image = null;
|
||||
video = null;
|
||||
this.depthStencilFormat = depthStencilFormat;
|
||||
init();
|
||||
if (renderTarget)
|
||||
createTexture();
|
||||
}
|
||||
|
||||
override function get_g1(): kha.graphics1.Graphics {
|
||||
if (graphics1 == null) {
|
||||
graphics1 = new kha.graphics2.Graphics1(this);
|
||||
}
|
||||
return graphics1;
|
||||
}
|
||||
|
||||
override function get_g2(): kha.graphics2.Graphics {
|
||||
if (graphics2 == null) {
|
||||
graphics2 = new kha.js.graphics4.Graphics2(this);
|
||||
}
|
||||
return graphics2;
|
||||
}
|
||||
|
||||
override function get_g4(): kha.graphics4.Graphics {
|
||||
if (graphics4 == null) {
|
||||
graphics4 = new Graphics(this);
|
||||
}
|
||||
return graphics4;
|
||||
}
|
||||
|
||||
override function get_width(): Int {
|
||||
return myWidth;
|
||||
}
|
||||
|
||||
override function get_height(): Int {
|
||||
return myHeight;
|
||||
}
|
||||
|
||||
override function get_format(): TextureFormat {
|
||||
return myFormat;
|
||||
}
|
||||
|
||||
override function get_realWidth(): Int {
|
||||
return myWidth;
|
||||
}
|
||||
|
||||
override function get_realHeight(): Int {
|
||||
return myHeight;
|
||||
}
|
||||
|
||||
override function get_stride(): Int {
|
||||
return formatByteSize(myFormat) * width;
|
||||
}
|
||||
|
||||
override public function isOpaque(x: Int, y: Int): Bool {
|
||||
if (data == null) {
|
||||
if (context == null)
|
||||
return true;
|
||||
else
|
||||
createImageData();
|
||||
}
|
||||
return (data.data[y * Std.int(image.width) * 4 + x * 4 + 3] != 0);
|
||||
}
|
||||
|
||||
override public function at(x: Int, y: Int): Color {
|
||||
if (bytes != null) {
|
||||
var r = bytes.get(y * width * 4 + x * 4);
|
||||
var g = bytes.get(y * width * 4 + x * 4 + 1);
|
||||
var b = bytes.get(y * width * 4 + x * 4 + 2);
|
||||
var a = bytes.get(y * width * 4 + x * 4 + 3);
|
||||
|
||||
return Color.fromValue((a << 24) | (r << 16) | (g << 8) | b);
|
||||
}
|
||||
else {
|
||||
if (data == null) {
|
||||
if (context == null)
|
||||
return Color.Black;
|
||||
else
|
||||
createImageData();
|
||||
}
|
||||
|
||||
var r = data.data[y * width * 4 + x * 4];
|
||||
var g = data.data[y * width * 4 + x * 4 + 1];
|
||||
var b = data.data[y * width * 4 + x * 4 + 2];
|
||||
var a = data.data[y * width * 4 + x * 4 + 3];
|
||||
|
||||
return Color.fromValue((a << 24) | (r << 16) | (g << 8) | b);
|
||||
}
|
||||
}
|
||||
|
||||
function createImageData() {
|
||||
if (Std.isOfType(image, Uint8Array)) {
|
||||
data = new js.html.ImageData(new js.lib.Uint8ClampedArray(image.buffer), this.width, this.height);
|
||||
}
|
||||
else {
|
||||
if (this.width > canvas.width || this.height > canvas.height) {
|
||||
var cw = canvas.width;
|
||||
var ch = canvas.height;
|
||||
while (this.width > cw || this.height > ch) {
|
||||
cw *= 2;
|
||||
ch *= 2;
|
||||
}
|
||||
canvas.width = cw;
|
||||
canvas.height = ch;
|
||||
}
|
||||
context.strokeStyle = "rgba(0,0,0,0)";
|
||||
context.fillStyle = "rgba(0,0,0,0)";
|
||||
context.fillRect(0, 0, image.width, image.height);
|
||||
context.drawImage(image, 0, 0, image.width, image.height, 0, 0, image.width, image.height);
|
||||
data = context.getImageData(0, 0, image.width, image.height);
|
||||
}
|
||||
}
|
||||
|
||||
static function upperPowerOfTwo(v: Int): Int {
|
||||
v--;
|
||||
v |= v >>> 1;
|
||||
v |= v >>> 2;
|
||||
v |= v >>> 4;
|
||||
v |= v >>> 8;
|
||||
v |= v >>> 16;
|
||||
v++;
|
||||
return v;
|
||||
}
|
||||
|
||||
public function createTexture(): Void {
|
||||
if (SystemImpl.gl == null)
|
||||
return;
|
||||
texture = SystemImpl.gl.createTexture();
|
||||
// texture.image = image;
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
// Sys.gl.pixelStorei(Sys.gl.UNPACK_FLIP_Y_WEBGL, true);
|
||||
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
if (renderTarget) {
|
||||
frameBuffer = SystemImpl.gl.createFramebuffer();
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
switch (myFormat) {
|
||||
case DEPTH16:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL.DEPTH_COMPONENT16 : GL.DEPTH_COMPONENT, realWidth, realHeight, 0,
|
||||
GL.DEPTH_COMPONENT, GL.UNSIGNED_SHORT, null);
|
||||
case RGBA128:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA32F : GL.RGBA, realWidth, realHeight, 0, GL.RGBA, GL.FLOAT, null);
|
||||
case RGBA64:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA16F : GL.RGBA, realWidth, realHeight, 0, GL.RGBA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, null);
|
||||
case RGBA32:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, realWidth, realHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
case A32:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R32F : GL.ALPHA, realWidth, realHeight, 0,
|
||||
SystemImpl.gl2 ? GL_RED : GL.ALPHA, GL.FLOAT, null);
|
||||
case A16:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R16F : GL.ALPHA, realWidth, realHeight, 0,
|
||||
SystemImpl.gl2 ? GL_RED : GL.ALPHA, SystemImpl.halfFloat.HALF_FLOAT_OES, null);
|
||||
default:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, realWidth, realHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
}
|
||||
|
||||
if (myFormat == DEPTH16) {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.TEXTURE_2D, texture, 0);
|
||||
// Some WebGL implementations throw incomplete framebuffer error, create color attachment
|
||||
if (!SystemImpl.gl2) {
|
||||
var colortex = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, colortex);
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, realWidth, realHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_2D, colortex, 0);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (samples > 1 && SystemImpl.gl2) {
|
||||
MSAAFrameBuffer = SystemImpl.gl.createFramebuffer();
|
||||
MSAAColorBuffer = SystemImpl.gl.createRenderbuffer();
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, MSAAColorBuffer);
|
||||
var MSAAFormat = switch (myFormat) {
|
||||
case RGBA128:
|
||||
untyped SystemImpl.gl.RGBA32F;
|
||||
case RGBA64:
|
||||
untyped SystemImpl.gl.RGBA16F;
|
||||
case RGBA32:
|
||||
untyped SystemImpl.gl.RGBA8;
|
||||
case A32:
|
||||
GL_R32F;
|
||||
case A16:
|
||||
GL_R16F;
|
||||
default:
|
||||
untyped SystemImpl.gl.RGBA8;
|
||||
};
|
||||
untyped SystemImpl.gl.renderbufferStorageMultisample(GL.RENDERBUFFER, samples, MSAAFormat, realWidth, realHeight);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.RENDERBUFFER, MSAAColorBuffer);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, MSAAFrameBuffer);
|
||||
}
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_2D, texture, 0);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
}
|
||||
|
||||
initDepthStencilBuffer(depthStencilFormat);
|
||||
var e = SystemImpl.gl.checkFramebufferStatus(GL.FRAMEBUFFER);
|
||||
if (e != GL.FRAMEBUFFER_COMPLETE) {
|
||||
trace("checkframebufferStatus error " + e);
|
||||
}
|
||||
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, null);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
}
|
||||
else if (video != null) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, video);
|
||||
}
|
||||
else {
|
||||
switch (myFormat) {
|
||||
case RGBA128:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA32F : GL.RGBA, myWidth, myHeight, 0, GL.RGBA, GL.FLOAT, image);
|
||||
case RGBA64:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA16F : GL.RGBA, myWidth, myHeight, 0, GL.RGBA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, image);
|
||||
case RGBA32:
|
||||
if (Std.isOfType(image, Uint8Array)) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, myWidth, myHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, image);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, image);
|
||||
}
|
||||
case A32:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R32F : GL.ALPHA, myWidth, myHeight, 0, SystemImpl.gl2 ? GL_RED : GL.ALPHA,
|
||||
GL.FLOAT, image);
|
||||
case A16:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R16F : GL.ALPHA, myWidth, myHeight, 0, SystemImpl.gl2 ? GL_RED : GL.ALPHA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, image);
|
||||
case L8:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.LUMINANCE, myWidth, myHeight, 0, GL.LUMINANCE, GL.UNSIGNED_BYTE, image);
|
||||
default:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, image);
|
||||
}
|
||||
}
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
|
||||
}
|
||||
|
||||
function initDepthStencilBuffer(depthStencilFormat: DepthStencilFormat) {
|
||||
switch (depthStencilFormat) {
|
||||
case NoDepthAndStencil:
|
||||
case DepthOnly, Depth16:
|
||||
{
|
||||
if (SystemImpl.depthTexture == null) {
|
||||
renderBuffer = SystemImpl.gl.createRenderbuffer();
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, renderBuffer);
|
||||
SystemImpl.gl.renderbufferStorage(GL.RENDERBUFFER, GL.DEPTH_COMPONENT16, realWidth, realHeight);
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.RENDERBUFFER, renderBuffer);
|
||||
}
|
||||
else {
|
||||
depthTexture = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, depthTexture);
|
||||
if (depthStencilFormat == DepthOnly)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_DEPTH_COMPONENT24 : GL.DEPTH_COMPONENT, realWidth, realHeight, 0,
|
||||
GL.DEPTH_COMPONENT, GL.UNSIGNED_INT, null);
|
||||
else
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL.DEPTH_COMPONENT16 : GL.DEPTH_COMPONENT, realWidth, realHeight, 0,
|
||||
GL.DEPTH_COMPONENT, GL.UNSIGNED_SHORT, null);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
|
||||
if (samples > 1 && SystemImpl.gl2) {
|
||||
MSAADepthBuffer = SystemImpl.gl.createRenderbuffer();
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, MSAADepthBuffer);
|
||||
if (depthStencilFormat == DepthOnly)
|
||||
untyped SystemImpl.gl.renderbufferStorageMultisample(GL.RENDERBUFFER, samples, GL_DEPTH_COMPONENT24, realWidth, realHeight);
|
||||
else
|
||||
untyped SystemImpl.gl.renderbufferStorageMultisample(GL.RENDERBUFFER, samples, GL.DEPTH_COMPONENT16, realWidth, realHeight);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.RENDERBUFFER, MSAADepthBuffer);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, MSAAFrameBuffer);
|
||||
}
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.TEXTURE_2D, depthTexture, 0);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
}
|
||||
}
|
||||
case DepthAutoStencilAuto, Depth24Stencil8, Depth32Stencil8:
|
||||
if (SystemImpl.depthTexture == null) {
|
||||
renderBuffer = SystemImpl.gl.createRenderbuffer();
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, renderBuffer);
|
||||
SystemImpl.gl.renderbufferStorage(GL.RENDERBUFFER, GL.DEPTH_STENCIL, realWidth, realHeight);
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.DEPTH_STENCIL_ATTACHMENT, GL.RENDERBUFFER, renderBuffer);
|
||||
}
|
||||
else {
|
||||
depthTexture = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, depthTexture);
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_DEPTH24_STENCIL8 : GL.DEPTH_STENCIL, realWidth, realHeight, 0,
|
||||
GL.DEPTH_STENCIL, SystemImpl.depthTexture.UNSIGNED_INT_24_8_WEBGL, null);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
if (samples > 1 && SystemImpl.gl2) {
|
||||
MSAADepthBuffer = SystemImpl.gl.createRenderbuffer();
|
||||
SystemImpl.gl.bindRenderbuffer(GL.RENDERBUFFER, MSAADepthBuffer);
|
||||
untyped SystemImpl.gl.renderbufferStorageMultisample(GL.RENDERBUFFER, samples, GL_DEPTH24_STENCIL8, realWidth, realHeight);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.DEPTH_STENCIL_ATTACHMENT, GL.RENDERBUFFER, MSAADepthBuffer);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, MSAAFrameBuffer);
|
||||
}
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_STENCIL_ATTACHMENT, GL.TEXTURE_2D, depthTexture, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public function set(stage: Int): Void {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + stage);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
if (video != null)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, video);
|
||||
}
|
||||
|
||||
public function setDepth(stage: Int): Void {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + stage);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, depthTexture);
|
||||
}
|
||||
|
||||
override public function setDepthStencilFrom(image: Image): Void {
|
||||
depthTexture = cast(image, WebGLImage).depthTexture;
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.TEXTURE_2D, depthTexture, 0);
|
||||
if (samples > 1 && SystemImpl.gl2) {
|
||||
MSAADepthBuffer = cast(image, WebGLImage).MSAADepthBuffer;
|
||||
SystemImpl.gl.framebufferRenderbuffer(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.RENDERBUFFER, MSAADepthBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
static function formatByteSize(format: TextureFormat): Int {
|
||||
return switch (format) {
|
||||
case RGBA32: 4;
|
||||
case L8: 1;
|
||||
case RGBA128: 16;
|
||||
case DEPTH16: 2;
|
||||
case RGBA64: 8;
|
||||
case A32: 4;
|
||||
case A16: 2;
|
||||
default: 4;
|
||||
}
|
||||
}
|
||||
|
||||
public function bytesToArray(bytes: Bytes): js.lib.ArrayBufferView {
|
||||
return switch (myFormat) {
|
||||
case RGBA32, L8:
|
||||
new Uint8Array(bytes.getData());
|
||||
case RGBA128, RGBA64, A32, A16:
|
||||
new Float32Array(bytes.getData());
|
||||
default:
|
||||
new Uint8Array(bytes.getData());
|
||||
}
|
||||
}
|
||||
|
||||
public var bytes: Bytes;
|
||||
|
||||
override public function lock(level: Int = 0): Bytes {
|
||||
bytes = Bytes.alloc(formatByteSize(myFormat) * width * height);
|
||||
return bytes;
|
||||
}
|
||||
|
||||
override public function unlock(): Void {
|
||||
data = null;
|
||||
image = null;
|
||||
|
||||
if (SystemImpl.gl != null) {
|
||||
texture = SystemImpl.gl.createTexture();
|
||||
// texture.image = image;
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
// Sys.gl.pixelStorei(Sys.gl.UNPACK_FLIP_Y_WEBGL, true);
|
||||
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
|
||||
switch (myFormat) {
|
||||
case L8:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.LUMINANCE, width, height, 0, GL.LUMINANCE, GL.UNSIGNED_BYTE, bytesToArray(bytes));
|
||||
|
||||
if (SystemImpl.ie && SystemImpl.gl.getError() == 1282) { // no LUMINANCE support in IE11
|
||||
var rgbaBytes = Bytes.alloc(width * height * 4);
|
||||
for (y in 0...height)
|
||||
for (x in 0...width) {
|
||||
var value = bytes.get(y * width + x);
|
||||
rgbaBytes.set(y * width * 4 + x * 4 + 0, value);
|
||||
rgbaBytes.set(y * width * 4 + x * 4 + 1, value);
|
||||
rgbaBytes.set(y * width * 4 + x * 4 + 2, value);
|
||||
rgbaBytes.set(y * width * 4 + x * 4 + 3, 255);
|
||||
}
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, width, height, 0, GL.RGBA, GL.UNSIGNED_BYTE, bytesToArray(rgbaBytes));
|
||||
}
|
||||
case RGBA128:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA32F : GL.RGBA, width, height, 0, GL.RGBA, GL.FLOAT,
|
||||
bytesToArray(bytes));
|
||||
case RGBA64:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_RGBA16F : GL.RGBA, width, height, 0, GL.RGBA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, bytesToArray(bytes));
|
||||
case A32:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R32F : GL.ALPHA, width, height, 0, SystemImpl.gl2 ? GL_RED : GL.ALPHA,
|
||||
GL.FLOAT, bytesToArray(bytes));
|
||||
case A16:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, SystemImpl.gl2 ? GL_R16F : GL.ALPHA, width, height, 0, SystemImpl.gl2 ? GL_RED : GL.ALPHA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, bytesToArray(bytes));
|
||||
case RGBA32:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, width, height, 0, GL.RGBA, GL.UNSIGNED_BYTE, bytesToArray(bytes));
|
||||
default:
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, width, height, 0, GL.RGBA, GL.UNSIGNED_BYTE, bytesToArray(bytes));
|
||||
}
|
||||
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
|
||||
|
||||
if (!readable) {
|
||||
bytes = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var pixels: js.lib.ArrayBufferView = null;
|
||||
|
||||
override public function getPixels(): Bytes {
|
||||
if (frameBuffer == null)
|
||||
return null;
|
||||
if (pixels == null) {
|
||||
switch (myFormat) {
|
||||
case RGBA128, A32:
|
||||
pixels = new Float32Array(Std.int(formatByteSize(myFormat) / 4) * width * height);
|
||||
case RGBA64, A16:
|
||||
pixels = new Uint16Array(Std.int(formatByteSize(myFormat) / 2) * width * height);
|
||||
case RGBA32, L8:
|
||||
pixels = new Uint8Array(formatByteSize(myFormat) * width * height);
|
||||
default:
|
||||
pixels = new Uint8Array(formatByteSize(myFormat) * width * height);
|
||||
}
|
||||
}
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
switch (myFormat) {
|
||||
case RGBA128:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, GL.RGBA, GL.FLOAT, pixels);
|
||||
case RGBA64:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, GL.RGBA, SystemImpl.halfFloat.HALF_FLOAT_OES, pixels);
|
||||
case RGBA32:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, GL.RGBA, GL.UNSIGNED_BYTE, pixels);
|
||||
case A32:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, SystemImpl.gl2 ? GL_RED : GL.ALPHA, GL.FLOAT, pixels);
|
||||
case A16:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, SystemImpl.gl2 ? GL_RED : GL.ALPHA, SystemImpl.halfFloat.HALF_FLOAT_OES, pixels);
|
||||
case L8:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, SystemImpl.gl2 ? GL_RED : GL.ALPHA, GL.UNSIGNED_BYTE, pixels);
|
||||
default:
|
||||
SystemImpl.gl.readPixels(0, 0, myWidth, myHeight, GL.RGBA, GL.UNSIGNED_BYTE, pixels);
|
||||
}
|
||||
return Bytes.ofData(pixels.buffer);
|
||||
}
|
||||
|
||||
override public function unload(): Void {
|
||||
if (texture != null)
|
||||
SystemImpl.gl.deleteTexture(texture);
|
||||
if (depthTexture != null)
|
||||
SystemImpl.gl.deleteTexture(depthTexture);
|
||||
if (frameBuffer != null)
|
||||
SystemImpl.gl.deleteFramebuffer(frameBuffer);
|
||||
if (renderBuffer != null)
|
||||
SystemImpl.gl.deleteRenderbuffer(renderBuffer);
|
||||
if (MSAAFrameBuffer != null)
|
||||
SystemImpl.gl.deleteFramebuffer(MSAAFrameBuffer);
|
||||
if (MSAAColorBuffer != null)
|
||||
SystemImpl.gl.deleteRenderbuffer(MSAAColorBuffer);
|
||||
if (MSAADepthBuffer != null)
|
||||
SystemImpl.gl.deleteRenderbuffer(MSAADepthBuffer);
|
||||
}
|
||||
|
||||
override public function generateMipmaps(levels: Int): Void {
|
||||
// WebGL requires to generate all mipmaps down to 1x1 size, ignoring levels for now
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
SystemImpl.gl.generateMipmap(GL.TEXTURE_2D);
|
||||
}
|
||||
|
||||
override public function setMipmaps(mipmaps: Array<Image>): Void {
|
||||
// Similar to generateMipmaps, specify all the levels down to 1x1 size
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, texture);
|
||||
if (myFormat == TextureFormat.RGBA128) {
|
||||
for (i in 0...mipmaps.length) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, i + 1, SystemImpl.gl2 ? GL_RGBA32F : GL.RGBA, mipmaps[i].width, mipmaps[i].height, 0, GL.RGBA,
|
||||
GL.FLOAT, cast(mipmaps[i], WebGLImage).image);
|
||||
}
|
||||
}
|
||||
else if (myFormat == TextureFormat.RGBA64) {
|
||||
for (i in 0...mipmaps.length) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, i + 1, SystemImpl.gl2 ? GL_RGBA16F : GL.RGBA, mipmaps[i].width, mipmaps[i].height, 0, GL.RGBA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, cast(mipmaps[i], WebGLImage).image);
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (i in 0...mipmaps.length) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_2D, i + 1, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, cast(mipmaps[i], WebGLImage).image);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
195  Kha/Backends/HTML5/kha/Window.hx  Normal file
@@ -0,0 +1,195 @@
|
||||
package kha;
|
||||
|
||||
import js.Syntax;
|
||||
import js.html.MutationObserver;
|
||||
|
||||
class Window {
|
||||
static var windows: Array<Window> = [];
|
||||
static var resizeCallbacks: Array<Array<Int->Int->Void>> = [];
|
||||
|
||||
var num: Int;
|
||||
var canvas: js.html.CanvasElement;
|
||||
var defaultWidth: Int;
|
||||
var defaultHeight: Int;
|
||||
|
||||
@:noCompletion
|
||||
@:noDoc
|
||||
public function new(num: Int, defaultWidth: Int, defaultHeight: Int, canvas: js.html.CanvasElement) {
|
||||
this.num = num;
|
||||
this.canvas = canvas;
|
||||
this.defaultWidth = defaultWidth;
|
||||
this.defaultHeight = defaultHeight;
|
||||
windows.push(this);
resizeCallbacks[num] = [];
final observer: MutationObserver = new MutationObserver(function(mutations: Array<js.html.MutationRecord>, observer: MutationObserver) {
|
||||
var isResize = false;
|
||||
for (mutation in mutations) {
|
||||
if (mutation.attributeName == "width" || mutation.attributeName == "height") {
|
||||
isResize = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (isResize) {
|
||||
this.resize(canvas.width, canvas.height);
|
||||
}
|
||||
});
|
||||
observer.observe(canvas, {attributes: true});
|
||||
}
|
||||
|
||||
public static function create(win: WindowOptions = null, frame: FramebufferOptions = null): Window {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static function destroy(window: Window): Void {}
|
||||
|
||||
public static function get(index: Int): Window {
|
||||
return windows[index];
|
||||
}
|
||||
|
||||
public static var all(get, never): Array<Window>;
|
||||
|
||||
static function get_all(): Array<Window> {
|
||||
return windows;
|
||||
}
|
||||
|
||||
public function resize(width: Int, height: Int): Void {
|
||||
for (callback in resizeCallbacks[num]) {
|
||||
callback(width, height);
|
||||
}
|
||||
}
|
||||
|
||||
public function move(x: Int, y: Int): Void {}
|
||||
|
||||
public function changeWindowFeatures(features: Int): Void {}
|
||||
|
||||
public function changeFramebuffer(frame: FramebufferOptions): Void {}
|
||||
|
||||
public var x(get, set): Int;
|
||||
|
||||
function get_x(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
function set_x(value: Int): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var y(get, set): Int;
|
||||
|
||||
function get_y(): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
function set_y(value: Int): Int {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public var width(get, set): Int;
|
||||
|
||||
function get_width(): Int {
|
||||
return canvas.width == 0 ? defaultWidth : canvas.width;
|
||||
}
|
||||
|
||||
function set_width(value: Int): Int {
|
||||
return 800;
|
||||
}
|
||||
|
||||
public var height(get, set): Int;
|
||||
|
||||
function get_height(): Int {
|
||||
return canvas.height == 0 ? defaultHeight : canvas.height;
|
||||
}
|
||||
|
||||
function set_height(value: Int): Int {
|
||||
return 600;
|
||||
}
|
||||
|
||||
public var mode(get, set): WindowMode;
|
||||
|
||||
function get_mode(): WindowMode {
|
||||
return isFullscreen() ? Fullscreen : Windowed;
|
||||
}
|
||||
|
||||
function set_mode(mode: WindowMode): WindowMode {
|
||||
if (mode == Fullscreen || mode == ExclusiveFullscreen) {
|
||||
if (!isFullscreen()) {
|
||||
requestFullscreen();
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (isFullscreen()) {
|
||||
exitFullscreen();
|
||||
}
|
||||
}
|
||||
return mode;
|
||||
}
|
||||
|
||||
function isFullscreen(): Bool {
|
||||
return Syntax.code("document.fullscreenElement === this.canvas ||
|
||||
document.mozFullScreenElement === this.canvas ||
|
||||
document.webkitFullscreenElement === this.canvas ||
|
||||
document.msFullscreenElement === this.canvas ");
|
||||
}
|
||||
|
||||
function requestFullscreen(): Void {
|
||||
untyped if (canvas.requestFullscreen) {
|
||||
var c: Dynamic = canvas;
|
||||
c.requestFullscreen({navigationUI: "hide"});
|
||||
}
|
||||
else if (canvas.msRequestFullscreen) {
|
||||
canvas.msRequestFullscreen();
|
||||
}
|
||||
else if (canvas.mozRequestFullScreen) {
|
||||
canvas.mozRequestFullScreen();
|
||||
}
|
||||
else if (canvas.webkitRequestFullscreen) {
|
||||
canvas.webkitRequestFullscreen();
|
||||
}
|
||||
}
|
||||
|
||||
function exitFullscreen(): Void {
|
||||
untyped if (document.exitFullscreen) {
|
||||
document.exitFullscreen();
|
||||
}
|
||||
else if (document.msExitFullscreen) {
|
||||
document.msExitFullscreen();
|
||||
}
|
||||
else if (document.mozCancelFullScreen) {
|
||||
document.mozCancelFullScreen();
|
||||
}
|
||||
else if (document.webkitExitFullscreen) {
|
||||
document.webkitExitFullscreen();
|
||||
}
|
||||
}
|
||||
|
||||
public var visible(get, set): Bool;
|
||||
|
||||
function get_visible(): Bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
function set_visible(value: Bool): Bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
public var title(get, set): String;
|
||||
|
||||
function get_title(): String {
|
||||
return "Kha";
|
||||
}
|
||||
|
||||
function set_title(value: String): String {
|
||||
return "Kha";
|
||||
}
|
||||
|
||||
public function notifyOnResize(callback: Int->Int->Void): Void {
|
||||
resizeCallbacks[num].push(callback);
|
||||
}
|
||||
|
||||
public var vSynced(get, never): Bool;
|
||||
|
||||
function get_vSynced(): Bool {
|
||||
return true;
|
||||
}
|
||||
}
|
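Note (editor's sketch, not part of this commit): the MutationObserver wired up in the constructor above forwards canvas width/height changes to notifyOnResize() listeners. A minimal usage sketch:

	// React to canvas size changes through the Window API above.
	var window = kha.Window.get(0);
	window.notifyOnResize(function(width: Int, height: Int) {
		trace('Canvas resized to ${width}x${height}');
	});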
187
Kha/Backends/HTML5/kha/arrays/ByteArray.hx
Normal file
@ -0,0 +1,187 @@
|
||||
package kha.arrays;
|
||||
|
||||
import js.lib.DataView;
|
||||
import kha.FastFloat;
|
||||
|
||||
@:forward
|
||||
abstract ByteArray(DataView) to DataView {
|
||||
static final LITTLE_ENDIAN: Bool = js.Syntax.code("new Uint8Array(new Uint32Array([0x12345678]).buffer)[0] === 0x78");
|
||||
|
||||
public var buffer(get, never): ByteBuffer;
|
||||
|
||||
inline function get_buffer(): ByteBuffer {
|
||||
return cast this.buffer;
|
||||
}
|
||||
|
||||
public function new(buffer: ByteBuffer, ?byteOffset: Int, ?byteLength: Int) {
|
||||
this = new DataView(buffer, byteOffset, byteLength);
|
||||
}
|
||||
|
||||
static public function make(byteLength: Int): ByteArray {
|
||||
return new ByteArray(ByteBuffer.create(byteLength));
|
||||
}
|
||||
|
||||
public inline function getInt8(byteOffset: Int): Int {
|
||||
return this.getInt8(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getUint8(byteOffset: Int): Int {
|
||||
return this.getUint8(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getInt16(byteOffset: Int): Int {
|
||||
return this.getInt16(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getUint16(byteOffset: Int): Int {
|
||||
return this.getUint16(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getInt32(byteOffset: Int): Int {
|
||||
return this.getInt32(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getUint32(byteOffset: Int): Int {
|
||||
return this.getUint32(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getFloat32(byteOffset: Int): FastFloat {
|
||||
return this.getFloat32(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getFloat64(byteOffset: Int): Float {
|
||||
return this.getFloat64(byteOffset, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setInt8(byteOffset: Int, value: Int): Void {
|
||||
this.setInt8(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setUint8(byteOffset: Int, value: Int): Void {
|
||||
this.setUint8(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setInt16(byteOffset: Int, value: Int): Void {
|
||||
this.setInt16(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setUint16(byteOffset: Int, value: Int): Void {
|
||||
this.setUint16(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setInt32(byteOffset: Int, value: Int): Void {
|
||||
this.setInt32(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setUint32(byteOffset: Int, value: Int): Void {
|
||||
this.setUint32(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setFloat32(byteOffset: Int, value: FastFloat): Void {
|
||||
this.setFloat32(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function setFloat64(byteOffset: Int, value: Float): Void {
|
||||
this.setFloat64(byteOffset, value, LITTLE_ENDIAN);
|
||||
}
|
||||
|
||||
public inline function getInt16LE(byteOffset: Int): Int {
|
||||
return this.getInt16(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function getUint16LE(byteOffset: Int): Int {
|
||||
return this.getUint16(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function getInt32LE(byteOffset: Int): Int {
|
||||
return this.getInt32(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function getUint32LE(byteOffset: Int): Int {
|
||||
return this.getUint32(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function getFloat32LE(byteOffset: Int): FastFloat {
|
||||
return this.getFloat32(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function getFloat64LE(byteOffset: Int): Float {
|
||||
return this.getFloat64(byteOffset, true);
|
||||
}
|
||||
|
||||
public inline function setInt16LE(byteOffset: Int, value: Int): Void {
|
||||
this.setInt16(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function setUint16LE(byteOffset: Int, value: Int): Void {
|
||||
this.setUint16(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function setInt32LE(byteOffset: Int, value: Int): Void {
|
||||
this.setInt32(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function setUint32LE(byteOffset: Int, value: Int): Void {
|
||||
this.setUint32(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function setFloat32LE(byteOffset: Int, value: FastFloat): Void {
|
||||
this.setFloat32(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function setFloat64LE(byteOffset: Int, value: Float): Void {
|
||||
this.setFloat64(byteOffset, value, true);
|
||||
}
|
||||
|
||||
public inline function getInt16BE(byteOffset: Int): Int {
|
||||
return this.getInt16(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getUint16BE(byteOffset: Int): Int {
|
||||
return this.getUint16(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getInt32BE(byteOffset: Int): Int {
|
||||
return this.getInt32(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getUint32BE(byteOffset: Int): Int {
|
||||
return this.getUint32(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getFloat32BE(byteOffset: Int): FastFloat {
|
||||
return this.getFloat32(byteOffset);
|
||||
}
|
||||
|
||||
public inline function getFloat64BE(byteOffset: Int): Float {
|
||||
return this.getFloat64(byteOffset);
|
||||
}
|
||||
|
||||
public inline function setInt16BE(byteOffset: Int, value: Int): Void {
|
||||
this.setInt16(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setUint16BE(byteOffset: Int, value: Int): Void {
|
||||
this.setUint16(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setInt32BE(byteOffset: Int, value: Int): Void {
|
||||
this.setInt32(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setUint32BE(byteOffset: Int, value: Int): Void {
|
||||
this.setUint32(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setFloat32BE(byteOffset: Int, value: FastFloat): Void {
|
||||
this.setFloat32(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function setFloat64BE(byteOffset: Int, value: Float): Void {
|
||||
this.setFloat64(byteOffset, value);
|
||||
}
|
||||
|
||||
public inline function subarray(start: Int, ?end: Int): ByteArray {
|
||||
return new ByteArray(buffer, start, end != null ? end - start : null);
|
||||
}
|
||||
}
|
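Note (editor's sketch, not part of this commit): ByteArray defaults to the platform's native byte order via LITTLE_ENDIAN, while the *LE/*BE accessors force an explicit order. A short usage sketch:

	// Explicit little-endian reads and writes through the abstract above.
	var bytes = kha.arrays.ByteArray.make(8);
	bytes.setFloat32LE(0, 1.5);
	bytes.setUint32LE(4, 0x12345678);
	trace(bytes.getFloat32LE(0));                    // 1.5
	trace(StringTools.hex(bytes.getUint32LE(4), 8)); // 12345678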
14
Kha/Backends/HTML5/kha/arrays/ByteBuffer.hx
Normal file
@ -0,0 +1,14 @@
package kha.arrays;

import js.lib.ArrayBuffer;

@:forward
abstract ByteBuffer(ArrayBuffer) from ArrayBuffer to ArrayBuffer {
	public static function create(length: Int): ByteBuffer {
		return new ByteBuffer(length);
	}

	function new(length: Int) {
		this = new ArrayBuffer(length);
	}
}
3
Kha/Backends/HTML5/kha/audio1/Audio.hx
Normal file
@ -0,0 +1,3 @@
package kha.audio1;

typedef Audio = kha.audio2.Audio1;
107
Kha/Backends/HTML5/kha/audio2/Audio.hx
Normal file
@ -0,0 +1,107 @@
|
||||
package kha.audio2;
|
||||
|
||||
import js.Syntax;
|
||||
import js.Browser;
|
||||
import js.html.URL;
|
||||
import js.html.audio.AudioContext;
|
||||
import js.html.audio.AudioProcessingEvent;
|
||||
import js.html.audio.ScriptProcessorNode;
|
||||
import kha.internal.IntBox;
|
||||
import kha.js.AEAudioChannel;
|
||||
import kha.Sound;
|
||||
|
||||
class Audio {
|
||||
public static var disableGcInteractions = false;
|
||||
static var intBox: IntBox = new IntBox(0);
|
||||
static var buffer: Buffer;
|
||||
@:noCompletion public static var _context: AudioContext;
|
||||
static var processingNode: ScriptProcessorNode;
|
||||
|
||||
static function initContext(): Void {
|
||||
try {
|
||||
_context = new AudioContext();
|
||||
return;
|
||||
}
|
||||
catch (e:Dynamic) {}
|
||||
try {
|
||||
Syntax.code("this._context = new webkitAudioContext();");
|
||||
return;
|
||||
}
|
||||
catch (e:Dynamic) {}
|
||||
}
|
||||
|
||||
@:noCompletion
|
||||
public static function _init(): Bool {
|
||||
initContext();
|
||||
if (_context == null)
|
||||
return false;
|
||||
|
||||
Audio.samplesPerSecond = Math.round(_context.sampleRate);
|
||||
var bufferSize = 1024 * 2;
|
||||
buffer = new Buffer(bufferSize * 4, 2, Std.int(_context.sampleRate));
|
||||
|
||||
processingNode = _context.createScriptProcessor(bufferSize, 0, 2);
|
||||
processingNode.onaudioprocess = function(e: AudioProcessingEvent) {
|
||||
var output1 = e.outputBuffer.getChannelData(0);
|
||||
var output2 = e.outputBuffer.getChannelData(1);
|
||||
if (audioCallback != null) {
|
||||
intBox.value = e.outputBuffer.length * 2;
|
||||
audioCallback(intBox, buffer);
|
||||
for (i in 0...e.outputBuffer.length) {
|
||||
output1[i] = buffer.data.get(buffer.readLocation);
|
||||
buffer.readLocation += 1;
|
||||
output2[i] = buffer.data.get(buffer.readLocation);
|
||||
buffer.readLocation += 1;
|
||||
if (buffer.readLocation >= buffer.size) {
|
||||
buffer.readLocation = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (i in 0...e.outputBuffer.length) {
|
||||
output1[i] = 0;
|
||||
output2[i] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
processingNode.connect(_context.destination);
|
||||
return true;
|
||||
}
|
||||
|
||||
public static var samplesPerSecond: Int;
|
||||
|
||||
public static var audioCallback: kha.internal.IntBox->Buffer->Void;
|
||||
|
||||
static var virtualChannels: Array<VirtualStreamChannel> = [];
|
||||
|
||||
public static function wakeChannels() {
|
||||
SystemImpl.mobileAudioPlaying = true;
|
||||
for (channel in virtualChannels) {
|
||||
channel.wake();
|
||||
}
|
||||
}
|
||||
|
||||
public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
|
||||
// var source = _context.createMediaStreamSource(cast sound.compressedData.getData());
|
||||
// source.connect(_context.destination);
|
||||
var element = Browser.document.createAudioElement();
|
||||
#if kha_debug_html5
|
||||
var blob = new js.html.Blob([sound.compressedData.getData()], {type: "audio/ogg"});
|
||||
#else
|
||||
var blob = new js.html.Blob([sound.compressedData.getData()], {type: "audio/mp4"});
|
||||
#end
|
||||
element.src = URL.createObjectURL(blob);
|
||||
element.loop = loop;
|
||||
var channel = new AEAudioChannel(element, loop);
|
||||
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
channel.play();
|
||||
return channel;
|
||||
}
|
||||
else {
|
||||
var virtualChannel = new VirtualStreamChannel(channel, loop);
|
||||
virtualChannels.push(virtualChannel);
|
||||
return virtualChannel;
|
||||
}
|
||||
}
|
||||
}
|
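Note (editor's sketch, not part of this commit): audioCallback is handed the number of interleaved samples to produce (via IntBox) and the shared ring buffer that onaudioprocess above drains. A sine-wave generator under those assumptions:

	// Fill the ring buffer with a 440 Hz tone, two interleaved channels per frame.
	var phase = 0.0;
	kha.audio2.Audio.audioCallback = function(requested: kha.internal.IntBox, buffer: kha.audio2.Buffer) {
		var step = 2 * Math.PI * 440 / kha.audio2.Audio.samplesPerSecond;
		var frames = Std.int(requested.value / 2);
		for (i in 0...frames) {
			var sample = Math.sin(phase);
			phase += step;
			buffer.data.set(buffer.writeLocation, sample);
			buffer.writeLocation += 1;
			buffer.data.set(buffer.writeLocation, sample);
			buffer.writeLocation += 1;
			if (buffer.writeLocation >= buffer.size) buffer.writeLocation = 0;
		}
	};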
126
Kha/Backends/HTML5/kha/audio2/VirtualStreamChannel.hx
Normal file
@ -0,0 +1,126 @@
|
||||
package kha.audio2;
|
||||
|
||||
import kha.js.AEAudioChannel;
|
||||
import kha.audio1.AudioChannel;
|
||||
|
||||
enum abstract PlayMode(Int) {
|
||||
var Stopped;
|
||||
var Paused;
|
||||
var Playing;
|
||||
}
|
||||
|
||||
class VirtualStreamChannel implements kha.audio1.AudioChannel {
|
||||
var aeChannel: AEAudioChannel;
|
||||
var mode = PlayMode.Playing;
|
||||
var lastTickTime: Float;
|
||||
var lastPosition: Float;
|
||||
var looping: Bool;
|
||||
|
||||
public function new(aeChannel: AEAudioChannel, looping: Bool) {
|
||||
this.aeChannel = aeChannel;
|
||||
this.looping = looping;
|
||||
lastTickTime = Scheduler.realTime();
|
||||
lastPosition = 0;
|
||||
}
|
||||
|
||||
public function wake(): Void {
|
||||
updatePosition();
|
||||
aeChannel.position = lastPosition;
|
||||
aeChannel.play();
|
||||
}
|
||||
|
||||
function updatePosition(): Void {
|
||||
var now = Scheduler.realTime();
|
||||
switch (mode) {
|
||||
case Stopped:
|
||||
lastPosition = 0;
|
||||
case Paused:
|
||||
// nothing
|
||||
case Playing:
|
||||
lastPosition += now - lastTickTime;
|
||||
while (lastPosition > length) {
|
||||
lastPosition -= length;
|
||||
}
|
||||
}
|
||||
lastTickTime = now;
|
||||
}
|
||||
|
||||
public function play(): Void {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
aeChannel.play();
|
||||
}
|
||||
else {
|
||||
updatePosition();
|
||||
mode = Playing;
|
||||
}
|
||||
}
|
||||
|
||||
public function pause(): Void {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
aeChannel.pause();
|
||||
}
|
||||
else {
|
||||
updatePosition();
|
||||
mode = Paused;
|
||||
}
|
||||
}
|
||||
|
||||
public function stop(): Void {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
aeChannel.stop();
|
||||
}
|
||||
else {
|
||||
updatePosition();
|
||||
mode = Stopped;
|
||||
}
|
||||
}
|
||||
|
||||
public var length(get, never): Float; // Seconds
|
||||
|
||||
function get_length(): Float {
|
||||
return aeChannel.length;
|
||||
}
|
||||
|
||||
public var position(get, set): Float; // Seconds
|
||||
|
||||
function get_position(): Float {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
return aeChannel.position;
|
||||
}
|
||||
else {
|
||||
updatePosition();
|
||||
return lastPosition;
|
||||
}
|
||||
}
|
||||
|
||||
function set_position(value: Float): Float {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
return aeChannel.position = value;
|
||||
}
|
||||
else {
|
||||
updatePosition();
|
||||
return lastPosition = value;
|
||||
}
|
||||
}
|
||||
|
||||
public var volume(get, set): Float;
|
||||
|
||||
function get_volume(): Float {
|
||||
return aeChannel.volume;
|
||||
}
|
||||
|
||||
function set_volume(value: Float): Float {
|
||||
return aeChannel.volume = value;
|
||||
}
|
||||
|
||||
public var finished(get, never): Bool;
|
||||
|
||||
function get_finished(): Bool {
|
||||
if (SystemImpl.mobileAudioPlaying) {
|
||||
return aeChannel.finished;
|
||||
}
|
||||
else {
|
||||
return mode == Stopped || (!looping && position >= length);
|
||||
}
|
||||
}
|
||||
}
|
51
Kha/Backends/HTML5/kha/capture/AudioCapture.hx
Normal file
@ -0,0 +1,51 @@
|
||||
package kha.capture;
|
||||
|
||||
import js.html.audio.AudioProcessingEvent;
|
||||
import kha.audio2.Buffer;
|
||||
|
||||
class AudioCapture {
|
||||
static var input: js.html.audio.MediaStreamAudioSourceNode;
|
||||
static var processingNode: js.html.audio.ScriptProcessorNode;
|
||||
static var buffer: Buffer;
|
||||
|
||||
public static var audioCallback: Int->Buffer->Void;
|
||||
|
||||
public static function init(initialized: Void->Void, error: Void->Void): Void {
|
||||
if (kha.audio2.Audio._context == null) {
|
||||
error();
|
||||
return;
|
||||
}
|
||||
|
||||
var getUserMedia = untyped __js__("navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia");
|
||||
getUserMedia.call(js.Browser.navigator, {audio: true}, function(stream: Dynamic) {
|
||||
input = kha.audio2.Audio._context.createMediaStreamSource(stream);
|
||||
|
||||
var bufferSize = 1024 * 2;
|
||||
buffer = new Buffer(bufferSize * 4, 2, Std.int(kha.audio2.Audio._context.sampleRate));
|
||||
|
||||
processingNode = kha.audio2.Audio._context.createScriptProcessor(bufferSize, 1, 0);
|
||||
processingNode.onaudioprocess = function(e: AudioProcessingEvent) {
|
||||
if (audioCallback != null) {
|
||||
var input1 = e.inputBuffer.getChannelData(0);
|
||||
var input2 = e.inputBuffer.getChannelData(0);
|
||||
for (i in 0...e.inputBuffer.length) {
|
||||
buffer.data.set(buffer.writeLocation, input1[i]);
|
||||
buffer.writeLocation += 1;
|
||||
buffer.data.set(buffer.writeLocation, input2[i]);
|
||||
buffer.writeLocation += 1;
|
||||
if (buffer.writeLocation >= buffer.size) {
|
||||
buffer.writeLocation = 0;
|
||||
}
|
||||
}
|
||||
audioCallback(e.inputBuffer.length * 2, buffer);
|
||||
}
|
||||
}
|
||||
|
||||
input.connect(processingNode);
|
||||
// input.connect(kha.audio2.Audio._context.destination);
|
||||
initialized();
|
||||
}, function() {
|
||||
error();
|
||||
});
|
||||
}
|
||||
}
|
18
Kha/Backends/HTML5/kha/capture/VideoCapture.hx
Normal file
@ -0,0 +1,18 @@
package kha.capture;

import js.Browser;

class VideoCapture {
	public static function init(initialized: kha.Video->Void, error: Void->Void): Void {
		var getUserMedia = untyped __js__("navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia");
		getUserMedia.call(js.Browser.navigator, {audio: true, video: true}, function(stream: Dynamic) {
			var element: js.html.VideoElement = cast Browser.document.createElement("video");
			element.srcObject = stream;
			element.onloadedmetadata = function(e) {
				initialized(kha.js.Video.fromElement(element));
			}
		}, function() {
			error();
		});
	}
}
199
Kha/Backends/HTML5/kha/graphics4/CubeMap.hx
Normal file
@ -0,0 +1,199 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import js.html.webgl.GL;
|
||||
import haxe.io.Bytes;
|
||||
import kha.js.graphics4.Graphics;
|
||||
|
||||
class CubeMap implements Canvas implements Resource {
|
||||
var myWidth: Int;
|
||||
var myHeight: Int;
|
||||
var format: TextureFormat;
|
||||
var renderTarget: Bool;
|
||||
var depthStencilFormat: DepthStencilFormat;
|
||||
var graphics4: kha.graphics4.Graphics;
|
||||
|
||||
public var frameBuffer: Dynamic = null;
|
||||
public var texture: Dynamic = null;
|
||||
public var depthTexture: Dynamic = null;
|
||||
public var isDepthAttachment: Bool = false;
|
||||
|
||||
// WebGL2 constants
|
||||
static inline var GL_RGBA16F = 0x881A;
|
||||
static inline var GL_RGBA32F = 0x8814;
|
||||
static inline var GL_R16F = 0x822D;
|
||||
static inline var GL_R32F = 0x822E;
|
||||
static inline var GL_DEPTH_COMPONENT24 = 0x81A6;
|
||||
static inline var GL_DEPTH24_STENCIL8 = 0x88F0;
|
||||
static inline var GL_DEPTH32F_STENCIL8 = 0x8CAD;
|
||||
|
||||
function new(size: Int, format: TextureFormat, renderTarget: Bool, depthStencilFormat: DepthStencilFormat) {
|
||||
myWidth = size;
|
||||
myHeight = size;
|
||||
this.format = format;
|
||||
this.renderTarget = renderTarget;
|
||||
this.depthStencilFormat = depthStencilFormat;
|
||||
if (renderTarget)
|
||||
createTexture();
|
||||
}
|
||||
|
||||
public static function createRenderTarget(size: Int, format: TextureFormat = null, depthStencil: DepthStencilFormat = null): CubeMap {
|
||||
if (format == null)
|
||||
format = TextureFormat.RGBA32;
|
||||
if (depthStencil == null)
|
||||
depthStencil = NoDepthAndStencil;
|
||||
return new CubeMap(size, format, true, depthStencil);
|
||||
}
|
||||
|
||||
function createTexture() {
|
||||
if (SystemImpl.gl == null)
|
||||
return;
|
||||
|
||||
texture = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, texture);
|
||||
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
|
||||
if (renderTarget) {
|
||||
frameBuffer = SystemImpl.gl.createFramebuffer();
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
|
||||
switch (format) {
|
||||
case DEPTH16:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, SystemImpl.gl2 ? GL.DEPTH_COMPONENT16 : GL.DEPTH_COMPONENT, myWidth,
|
||||
myHeight, 0, GL.DEPTH_COMPONENT, GL.UNSIGNED_SHORT, null);
|
||||
case RGBA128:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, SystemImpl.gl2 ? GL_RGBA32F : GL.RGBA, myWidth, myHeight, 0, GL.RGBA,
|
||||
GL.FLOAT, null);
|
||||
case RGBA64:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, SystemImpl.gl2 ? GL_RGBA16F : GL.RGBA, myWidth, myHeight, 0, GL.RGBA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, null);
|
||||
case RGBA32:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GL.RGBA, myWidth, myHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
case A32:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, SystemImpl.gl2 ? GL_R32F : GL.ALPHA, myWidth, myHeight, 0, GL.ALPHA,
|
||||
GL.FLOAT, null);
|
||||
case A16:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, SystemImpl.gl2 ? GL_R16F : GL.ALPHA, myWidth, myHeight, 0, GL.ALPHA,
|
||||
SystemImpl.halfFloat.HALF_FLOAT_OES, null);
|
||||
default:
|
||||
for (i in 0...6)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GL.RGBA, myWidth, myHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
}
|
||||
|
||||
if (format == DEPTH16) {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
isDepthAttachment = true;
|
||||
// Some WebGL implementations throw incomplete framebuffer error, create color attachment
|
||||
if (!SystemImpl.gl2) {
|
||||
var colortex = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, colortex);
|
||||
for (i in 0...6) {
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, GL.RGBA, myWidth, myHeight, 0, GL.RGBA, GL.UNSIGNED_BYTE, null);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_CUBE_MAP_POSITIVE_X + i, colortex, 0);
|
||||
}
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, texture);
|
||||
}
|
||||
}
|
||||
|
||||
initDepthStencilBuffer(depthStencilFormat);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
}
|
||||
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, null);
|
||||
}
|
||||
|
||||
function initDepthStencilBuffer(depthStencilFormat: DepthStencilFormat) {
|
||||
switch (depthStencilFormat) {
|
||||
case NoDepthAndStencil:
|
||||
case DepthOnly, Depth16:
|
||||
{
|
||||
depthTexture = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, depthTexture);
|
||||
if (depthStencilFormat == DepthOnly)
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP, 0, SystemImpl.gl2 ? GL_DEPTH_COMPONENT24 : GL.DEPTH_COMPONENT, myWidth, myHeight, 0,
|
||||
GL.DEPTH_COMPONENT, GL.UNSIGNED_INT, null);
|
||||
else
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP, 0, SystemImpl.gl2 ? GL.DEPTH_COMPONENT16 : GL.DEPTH_COMPONENT, myWidth, myHeight, 0,
|
||||
GL.DEPTH_COMPONENT, GL.UNSIGNED_SHORT, null);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_ATTACHMENT, GL.TEXTURE_CUBE_MAP, depthTexture, 0);
|
||||
}
|
||||
case DepthAutoStencilAuto, Depth24Stencil8, Depth32Stencil8:
|
||||
depthTexture = SystemImpl.gl.createTexture();
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, depthTexture);
|
||||
SystemImpl.gl.texImage2D(GL.TEXTURE_CUBE_MAP, 0, SystemImpl.gl2 ? GL_DEPTH24_STENCIL8 : GL.DEPTH_STENCIL, myWidth, myHeight, 0,
|
||||
GL.DEPTH_STENCIL, SystemImpl.depthTexture.UNSIGNED_INT_24_8_WEBGL, null);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, frameBuffer);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.DEPTH_STENCIL_ATTACHMENT, GL.TEXTURE_CUBE_MAP, depthTexture, 0);
|
||||
}
|
||||
}
|
||||
|
||||
public function set(stage: Int): Void {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + stage);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, texture);
|
||||
}
|
||||
|
||||
public function setDepth(stage: Int): Void {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + stage);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, depthTexture);
|
||||
}
|
||||
|
||||
public function unload(): Void {}
|
||||
|
||||
public function lock(level: Int = 0): Bytes {
|
||||
return null;
|
||||
}
|
||||
|
||||
public function unlock(): Void {}
|
||||
|
||||
public var width(get, never): Int;
|
||||
|
||||
function get_width(): Int {
|
||||
return myWidth;
|
||||
}
|
||||
|
||||
public var height(get, never): Int;
|
||||
|
||||
function get_height(): Int {
|
||||
return myHeight;
|
||||
}
|
||||
|
||||
public var g1(get, never): kha.graphics1.Graphics;
|
||||
|
||||
function get_g1(): kha.graphics1.Graphics {
|
||||
return null;
|
||||
}
|
||||
|
||||
public var g2(get, never): kha.graphics2.Graphics;
|
||||
|
||||
function get_g2(): kha.graphics2.Graphics {
|
||||
return null;
|
||||
}
|
||||
|
||||
public var g4(get, never): kha.graphics4.Graphics;
|
||||
|
||||
function get_g4(): kha.graphics4.Graphics {
|
||||
if (graphics4 == null) {
|
||||
graphics4 = new Graphics(this);
|
||||
}
|
||||
return graphics4;
|
||||
}
|
||||
}
|
32
Kha/Backends/HTML5/kha/graphics4/FragmentShader.hx
Normal file
@ -0,0 +1,32 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import js.html.webgl.GL;
|
||||
|
||||
class FragmentShader {
|
||||
public var sources: Array<String>;
|
||||
public var type: Dynamic;
|
||||
public var shader: Dynamic;
|
||||
public var files: Array<String>;
|
||||
|
||||
public function new(sources: Array<Blob>, files: Array<String>) {
|
||||
this.sources = [];
|
||||
for (source in sources) {
|
||||
this.sources.push(source.toString());
|
||||
}
|
||||
this.type = GL.FRAGMENT_SHADER;
|
||||
this.shader = null;
|
||||
this.files = files;
|
||||
}
|
||||
|
||||
public static function fromSource(source: String): FragmentShader {
|
||||
var shader = new FragmentShader([], ["runtime-string"]);
|
||||
shader.sources.push(source);
|
||||
return shader;
|
||||
}
|
||||
|
||||
public function delete(): Void {
|
||||
SystemImpl.gl.deleteShader(shader);
|
||||
shader = null;
|
||||
sources = null;
|
||||
}
|
||||
}
|
50
Kha/Backends/HTML5/kha/graphics4/IndexBuffer.hx
Normal file
@ -0,0 +1,50 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import js.html.webgl.GL;
|
||||
import kha.arrays.Uint32Array;
|
||||
import kha.graphics4.Usage;
|
||||
|
||||
class IndexBuffer {
|
||||
public var _data: Uint32Array;
|
||||
|
||||
var buffer: Dynamic;
|
||||
var mySize: Int;
|
||||
var usage: Usage;
|
||||
var lockStart: Int = 0;
|
||||
var lockEnd: Int = 0;
|
||||
|
||||
public function new(indexCount: Int, usage: Usage, canRead: Bool = false) {
|
||||
this.usage = usage;
|
||||
mySize = indexCount;
|
||||
buffer = SystemImpl.gl.createBuffer();
|
||||
_data = new Uint32Array(indexCount);
|
||||
}
|
||||
|
||||
public function delete(): Void {
|
||||
_data = null;
|
||||
SystemImpl.gl.deleteBuffer(buffer);
|
||||
}
|
||||
|
||||
public function lock(?start: Int, ?count: Int): Uint32Array {
|
||||
lockStart = start != null ? start : 0;
|
||||
lockEnd = count != null ? start + count : mySize;
|
||||
return _data.subarray(lockStart, lockEnd);
|
||||
}
|
||||
|
||||
public function unlock(?count: Int): Void {
|
||||
if (count != null)
|
||||
lockEnd = lockStart + count;
|
||||
SystemImpl.gl.bindBuffer(GL.ELEMENT_ARRAY_BUFFER, buffer);
|
||||
var data = _data.subarray(lockStart, lockEnd);
|
||||
var glData: Dynamic = SystemImpl.elementIndexUint == null ? new js.lib.Uint16Array(data.buffer) : data;
|
||||
SystemImpl.gl.bufferData(GL.ELEMENT_ARRAY_BUFFER, glData, usage == Usage.DynamicUsage ? GL.DYNAMIC_DRAW : GL.STATIC_DRAW);
|
||||
}
|
||||
|
||||
public function set(): Void {
|
||||
SystemImpl.gl.bindBuffer(GL.ELEMENT_ARRAY_BUFFER, buffer);
|
||||
}
|
||||
|
||||
public function count(): Int {
|
||||
return mySize;
|
||||
}
|
||||
}
|
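Note (editor's sketch, not part of this commit): the lock()/unlock() pair above stages index data in _data and uploads it to the GL buffer on unlock(). Typical usage for two triangles (Usage.StaticUsage assumed to exist alongside the DynamicUsage referenced above):

	var indices = new kha.graphics4.IndexBuffer(6, kha.graphics4.Usage.StaticUsage);
	var ind = indices.lock();
	ind[0] = 0; ind[1] = 1; ind[2] = 2; // first triangle
	ind[3] = 1; ind[4] = 3; ind[5] = 2; // second triangle
	indices.unlock(); // uploads the locked range to the element array buffer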
133
Kha/Backends/HTML5/kha/graphics4/PipelineState.hx
Normal file
@ -0,0 +1,133 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import js.html.webgl.GL;
|
||||
import kha.graphics4.VertexData;
|
||||
|
||||
class PipelineState extends PipelineStateBase {
|
||||
var program: Dynamic = null;
|
||||
var textures: Array<String>;
|
||||
var textureValues: Array<Dynamic>;
|
||||
|
||||
public function new() {
|
||||
super();
|
||||
textures = new Array<String>();
|
||||
textureValues = new Array<Dynamic>();
|
||||
}
|
||||
|
||||
public function delete(): Void {
|
||||
if (program != null) {
|
||||
SystemImpl.gl.deleteProgram(program);
|
||||
}
|
||||
}
|
||||
|
||||
public function compile(): Void {
|
||||
if (program != null) {
|
||||
SystemImpl.gl.deleteProgram(program);
|
||||
}
|
||||
program = SystemImpl.gl.createProgram();
|
||||
compileShader(vertexShader);
|
||||
compileShader(fragmentShader);
|
||||
SystemImpl.gl.attachShader(program, vertexShader.shader);
|
||||
SystemImpl.gl.attachShader(program, fragmentShader.shader);
|
||||
|
||||
var index = 0;
|
||||
for (structure in inputLayout) {
|
||||
for (element in structure.elements) {
|
||||
SystemImpl.gl.bindAttribLocation(program, index, element.name);
|
||||
if (element.data == VertexData.Float32_4X4) {
|
||||
index += 4;
|
||||
}
|
||||
else {
|
||||
++index;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SystemImpl.gl.linkProgram(program);
|
||||
if (!SystemImpl.gl.getProgramParameter(program, GL.LINK_STATUS)) {
|
||||
var message = "Could not link the shader program:\n" + SystemImpl.gl.getProgramInfoLog(program);
|
||||
trace("Error: " + message);
|
||||
throw message;
|
||||
}
|
||||
}
|
||||
|
||||
public function set(): Void {
|
||||
SystemImpl.gl.useProgram(program);
|
||||
for (index in 0...textureValues.length)
|
||||
SystemImpl.gl.uniform1i(textureValues[index], index);
|
||||
SystemImpl.gl.colorMask(colorWriteMaskRed, colorWriteMaskGreen, colorWriteMaskBlue, colorWriteMaskAlpha);
|
||||
}
|
||||
|
||||
function compileShader(shader: Dynamic): Void {
|
||||
if (shader.shader != null)
|
||||
return;
|
||||
var s = SystemImpl.gl.createShader(shader.type);
|
||||
var highp = SystemImpl.gl.getShaderPrecisionFormat(GL.FRAGMENT_SHADER, GL.HIGH_FLOAT);
|
||||
var highpSupported = highp.precision != 0;
|
||||
var files: Array<String> = shader.files;
|
||||
for (i in 0...files.length) {
|
||||
if (SystemImpl.gl2) {
|
||||
if (files[i].indexOf("-webgl2") >= 0 || files[i].indexOf("runtime-string") >= 0) {
|
||||
SystemImpl.gl.shaderSource(s, shader.sources[i]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (!highpSupported && (files[i].indexOf("-relaxed") >= 0 || files[i].indexOf("runtime-string") >= 0)) {
|
||||
SystemImpl.gl.shaderSource(s, shader.sources[i]);
|
||||
break;
|
||||
}
|
||||
if (highpSupported && (files[i].indexOf("-relaxed") < 0 || files[i].indexOf("runtime-string") >= 0)) {
|
||||
SystemImpl.gl.shaderSource(s, shader.sources[i]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
SystemImpl.gl.compileShader(s);
|
||||
if (!SystemImpl.gl.getShaderParameter(s, GL.COMPILE_STATUS)) {
|
||||
var message = "Could not compile shader:\n" + SystemImpl.gl.getShaderInfoLog(s);
|
||||
trace("Error: " + message);
|
||||
throw message;
|
||||
}
|
||||
shader.shader = s;
|
||||
}
|
||||
|
||||
public function getConstantLocation(name: String): kha.graphics4.ConstantLocation {
|
||||
var location = SystemImpl.gl.getUniformLocation(program, name);
|
||||
if (location == null) {
|
||||
trace("Warning: Uniform " + name + " not found.");
|
||||
}
|
||||
var type = GL.FLOAT;
|
||||
var count: Int = SystemImpl.gl.getProgramParameter(program, GL.ACTIVE_UNIFORMS);
|
||||
for (i in 0...count) {
|
||||
var info = SystemImpl.gl.getActiveUniform(program, i);
|
||||
if (info.name == name || info.name == name + "[0]") {
|
||||
type = info.type;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return new kha.js.graphics4.ConstantLocation(location, type);
|
||||
}
|
||||
|
||||
public function getTextureUnit(name: String): kha.graphics4.TextureUnit {
|
||||
var index = findTexture(name);
|
||||
if (index < 0) {
|
||||
var location = SystemImpl.gl.getUniformLocation(program, name);
|
||||
if (location == null) {
|
||||
trace("Warning: Sampler " + name + " not found.");
|
||||
}
|
||||
index = textures.length;
|
||||
textureValues.push(location);
|
||||
textures.push(name);
|
||||
}
|
||||
return new kha.js.graphics4.TextureUnit(index);
|
||||
}
|
||||
|
||||
function findTexture(name: String): Int {
|
||||
for (index in 0...textures.length) {
|
||||
if (textures[index] == name)
|
||||
return index;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
}
|
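Note (editor's sketch, not part of this commit): typical setup against the compile()/getConstantLocation() path above. The shader source strings are placeholders supplied by the project, not values from this commit:

	var structure = new kha.graphics4.VertexStructure();
	structure.add("pos", kha.graphics4.VertexData.Float32_3X);

	var pipeline = new kha.graphics4.PipelineState();
	pipeline.inputLayout = [structure];
	pipeline.vertexShader = kha.graphics4.VertexShader.fromSource(vertexSource);       // placeholder source string
	pipeline.fragmentShader = kha.graphics4.FragmentShader.fromSource(fragmentSource); // placeholder source string
	pipeline.compile(); // throws with the program info log if linking fails
	var mvp = pipeline.getConstantLocation("MVP"); // traces a warning if the uniform is missing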
206
Kha/Backends/HTML5/kha/graphics4/VertexBuffer.hx
Normal file
@ -0,0 +1,206 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import kha.arrays.Float32Array;
|
||||
import js.html.webgl.GL;
|
||||
import kha.arrays.ByteArray;
|
||||
import kha.graphics4.Usage;
|
||||
import kha.graphics4.VertexStructure;
|
||||
|
||||
class VertexBuffer {
|
||||
public var _data: ByteArray;
|
||||
|
||||
var buffer: Dynamic;
|
||||
var mySize: Int;
|
||||
var myStride: Int;
|
||||
var sizes: Array<Int>;
|
||||
var offsets: Array<Int>;
|
||||
var types: Array<Int>;
|
||||
var instanceDataStepRate: Int;
|
||||
var lockStart: Int = 0;
|
||||
var lockEnd: Int = 0;
|
||||
|
||||
public function new(vertexCount: Int, structure: VertexStructure, usage: Usage, instanceDataStepRate: Int = 0, canRead: Bool = false) {
|
||||
this.instanceDataStepRate = instanceDataStepRate;
|
||||
mySize = vertexCount;
|
||||
myStride = 0;
|
||||
for (element in structure.elements) {
|
||||
myStride += VertexStructure.dataByteSize(element.data);
|
||||
}
|
||||
|
||||
buffer = SystemImpl.gl.createBuffer();
|
||||
_data = ByteArray.make(vertexCount * myStride);
|
||||
|
||||
sizes = new Array<Int>();
|
||||
offsets = new Array<Int>();
|
||||
types = new Array<Int>();
|
||||
sizes[structure.elements.length - 1] = 0;
|
||||
offsets[structure.elements.length - 1] = 0;
|
||||
types[structure.elements.length - 1] = 0;
|
||||
|
||||
var offset = 0;
|
||||
var index = 0;
|
||||
for (element in structure.elements) {
|
||||
var size;
|
||||
var type;
|
||||
switch (element.data) {
|
||||
case Float32_1X:
|
||||
size = 1;
|
||||
type = GL.FLOAT;
|
||||
case Float32_2X:
|
||||
size = 2;
|
||||
type = GL.FLOAT;
|
||||
case Float32_3X:
|
||||
size = 3;
|
||||
type = GL.FLOAT;
|
||||
case Float32_4X:
|
||||
size = 4;
|
||||
type = GL.FLOAT;
|
||||
case Float32_4X4:
|
||||
size = 4 * 4;
|
||||
type = GL.FLOAT;
|
||||
case Int8_1X, Int8_1X_Normalized:
|
||||
size = 1;
|
||||
type = GL.BYTE;
|
||||
case Int8_2X, Int8_2X_Normalized:
|
||||
size = 2;
|
||||
type = GL.BYTE;
|
||||
case Int8_4X, Int8_4X_Normalized:
|
||||
size = 4;
|
||||
type = GL.BYTE;
|
||||
case UInt8_1X, UInt8_1X_Normalized:
|
||||
size = 1;
|
||||
type = GL.UNSIGNED_BYTE;
|
||||
case UInt8_2X, UInt8_2X_Normalized:
|
||||
size = 2;
|
||||
type = GL.UNSIGNED_BYTE;
|
||||
case UInt8_4X, UInt8_4X_Normalized:
|
||||
size = 4;
|
||||
type = GL.UNSIGNED_BYTE;
|
||||
case Int16_1X, Int16_1X_Normalized:
|
||||
size = 1;
|
||||
type = GL.SHORT;
|
||||
case Int16_2X, Int16_2X_Normalized:
|
||||
size = 2;
|
||||
type = GL.SHORT;
|
||||
case Int16_4X, Int16_4X_Normalized:
|
||||
size = 4;
|
||||
type = GL.SHORT;
|
||||
case UInt16_1X, UInt16_1X_Normalized:
|
||||
size = 1;
|
||||
type = GL.UNSIGNED_SHORT;
|
||||
case UInt16_2X, UInt16_2X_Normalized:
|
||||
size = 2;
|
||||
type = GL.UNSIGNED_SHORT;
|
||||
case UInt16_4X, UInt16_4X_Normalized:
|
||||
size = 4;
|
||||
type = GL.UNSIGNED_SHORT;
|
||||
case Int32_1X:
|
||||
size = 1;
|
||||
type = GL.INT;
|
||||
case Int32_2X:
|
||||
size = 2;
|
||||
type = GL.INT;
|
||||
case Int32_3X:
|
||||
size = 3;
|
||||
type = GL.INT;
|
||||
case Int32_4X:
|
||||
size = 4;
|
||||
type = GL.INT;
|
||||
case UInt32_1X:
|
||||
size = 1;
|
||||
type = GL.UNSIGNED_INT;
|
||||
case UInt32_2X:
|
||||
size = 2;
|
||||
type = GL.UNSIGNED_INT;
|
||||
case UInt32_3X:
|
||||
size = 3;
|
||||
type = GL.UNSIGNED_INT;
|
||||
case UInt32_4X:
|
||||
size = 4;
|
||||
type = GL.UNSIGNED_INT;
|
||||
}
|
||||
sizes[index] = size;
|
||||
offsets[index] = offset;
|
||||
types[index] = type;
|
||||
offset += VertexStructure.dataByteSize(element.data);
|
||||
++index;
|
||||
}
|
||||
|
||||
SystemImpl.gl.bindBuffer(GL.ARRAY_BUFFER, buffer);
|
||||
SystemImpl.gl.bufferData(GL.ARRAY_BUFFER, _data.subarray(0 * stride(), mySize * stride()),
|
||||
usage == Usage.DynamicUsage ? GL.DYNAMIC_DRAW : GL.STATIC_DRAW);
|
||||
}
|
||||
|
||||
public function delete(): Void {
|
||||
_data = null;
|
||||
SystemImpl.gl.deleteBuffer(buffer);
|
||||
}
|
||||
|
||||
public function lock(?start: Int, ?count: Int): Float32Array {
|
||||
lockStart = start != null ? start : 0;
|
||||
lockEnd = count != null ? start + count : mySize;
|
||||
return _data.subarray(lockStart * stride(), lockEnd * stride());
|
||||
}
|
||||
|
||||
public function unlock(?count: Int): Void {
|
||||
if (count != null)
|
||||
lockEnd = lockStart + count;
|
||||
SystemImpl.gl.bindBuffer(GL.ARRAY_BUFFER, buffer);
|
||||
if (SystemImpl.safari) {
|
||||
SystemImpl.gl.bufferData(GL.ARRAY_BUFFER, _data.subarray(0 * stride(), lockEnd * stride()), GL.DYNAMIC_DRAW);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.bufferSubData(GL.ARRAY_BUFFER, lockStart * stride(), _data.subarray(lockStart * stride(), lockEnd * stride()));
|
||||
}
|
||||
}
|
||||
|
||||
public function stride(): Int {
|
||||
return myStride;
|
||||
}
|
||||
|
||||
public function count(): Int {
|
||||
return mySize;
|
||||
}
|
||||
|
||||
public function set(offset: Int): Int {
|
||||
var ext: Dynamic = SystemImpl.gl2 ? true : SystemImpl.gl.getExtension("ANGLE_instanced_arrays");
|
||||
SystemImpl.gl.bindBuffer(GL.ARRAY_BUFFER, buffer);
|
||||
var attributesOffset = 0;
|
||||
for (i in 0...sizes.length) {
|
||||
if (sizes[i] > 4) {
|
||||
var size = sizes[i];
|
||||
var addonOffset = 0;
|
||||
while (size > 0) {
|
||||
SystemImpl.gl.enableVertexAttribArray(offset + attributesOffset);
|
||||
SystemImpl.gl.vertexAttribPointer(offset + attributesOffset, 4, GL.FLOAT, false, myStride, offsets[i] + addonOffset);
|
||||
if (ext) {
|
||||
if (SystemImpl.gl2) {
|
||||
untyped SystemImpl.gl.vertexAttribDivisor(offset + attributesOffset, instanceDataStepRate);
|
||||
}
|
||||
else {
|
||||
ext.vertexAttribDivisorANGLE(offset + attributesOffset, instanceDataStepRate);
|
||||
}
|
||||
}
|
||||
size -= 4;
|
||||
addonOffset += 4 * 4;
|
||||
++attributesOffset;
|
||||
}
|
||||
}
|
||||
else {
|
||||
var normalized = types[i] == GL.FLOAT ? false : true;
|
||||
SystemImpl.gl.enableVertexAttribArray(offset + attributesOffset);
|
||||
SystemImpl.gl.vertexAttribPointer(offset + attributesOffset, sizes[i], types[i], normalized, myStride, offsets[i]);
|
||||
if (ext) {
|
||||
if (SystemImpl.gl2) {
|
||||
untyped SystemImpl.gl.vertexAttribDivisor(offset + attributesOffset, instanceDataStepRate);
|
||||
}
|
||||
else {
|
||||
ext.vertexAttribDivisorANGLE(offset + attributesOffset, instanceDataStepRate);
|
||||
}
|
||||
}
|
||||
++attributesOffset;
|
||||
}
|
||||
}
|
||||
return attributesOffset;
|
||||
}
|
||||
}
|
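Note (editor's sketch, not part of this commit): filling a vertex buffer through the lock()/unlock() pair above, reusing the structure from the previous sketch; lock() is declared to return a Float32Array view over the staged data:

	var vertices = new kha.graphics4.VertexBuffer(3, structure, kha.graphics4.Usage.StaticUsage);
	var v = vertices.lock();
	v[0] = -1.0; v[1] = -1.0; v[2] = 0.0; // vertex 0, "pos"
	v[3] =  1.0; v[4] = -1.0; v[5] = 0.0; // vertex 1
	v[6] =  0.0; v[7] =  1.0; v[8] = 0.0; // vertex 2
	vertices.unlock(); // uploads via bufferSubData (or a full bufferData on Safari)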
32
Kha/Backends/HTML5/kha/graphics4/VertexShader.hx
Normal file
@ -0,0 +1,32 @@
|
||||
package kha.graphics4;
|
||||
|
||||
import js.html.webgl.GL;
|
||||
|
||||
class VertexShader {
|
||||
public var sources: Array<String>;
|
||||
public var type: Dynamic;
|
||||
public var shader: Dynamic;
|
||||
public var files: Array<String>;
|
||||
|
||||
public function new(sources: Array<Blob>, files: Array<String>) {
|
||||
this.sources = [];
|
||||
for (source in sources) {
|
||||
this.sources.push(source.toString());
|
||||
}
|
||||
this.type = GL.VERTEX_SHADER;
|
||||
this.shader = null;
|
||||
this.files = files;
|
||||
}
|
||||
|
||||
public static function fromSource(source: String): VertexShader {
|
||||
var shader = new VertexShader([], ["runtime-string"]);
|
||||
shader.sources.push(source);
|
||||
return shader;
|
||||
}
|
||||
|
||||
public function delete(): Void {
|
||||
SystemImpl.gl.deleteShader(shader);
|
||||
shader = null;
|
||||
sources = null;
|
||||
}
|
||||
}
|
76
Kha/Backends/HTML5/kha/input/MouseImpl.hx
Normal file
@ -0,0 +1,76 @@
|
||||
package kha.input;
|
||||
|
||||
import kha.SystemImpl;
|
||||
import kha.input.Mouse;
|
||||
|
||||
class MouseImpl extends kha.input.Mouse {
|
||||
public function new() {
|
||||
super();
|
||||
}
|
||||
|
||||
override public function lock(): Void {
|
||||
SystemImpl.lockMouse();
|
||||
}
|
||||
|
||||
override public function unlock(): Void {
|
||||
SystemImpl.unlockMouse();
|
||||
}
|
||||
|
||||
override public function canLock(): Bool {
|
||||
return SystemImpl.canLockMouse();
|
||||
}
|
||||
|
||||
override public function isLocked(): Bool {
|
||||
return SystemImpl.isMouseLocked();
|
||||
}
|
||||
|
||||
override public function notifyOnLockChange(func: Void->Void, error: Void->Void): Void {
|
||||
SystemImpl.notifyOfMouseLockChange(func, error);
|
||||
}
|
||||
|
||||
override public function removeFromLockChange(func: Void->Void, error: Void->Void): Void {
|
||||
SystemImpl.removeFromMouseLockChange(func, error);
|
||||
}
|
||||
|
||||
override public function hideSystemCursor(): Void {
|
||||
SystemImpl.khanvas.style.cursor = "none";
|
||||
}
|
||||
|
||||
override public function showSystemCursor(): Void {
|
||||
SystemImpl.khanvas.style.cursor = "default";
|
||||
}
|
||||
|
||||
override public function setSystemCursor(cursor: MouseCursor): Void {
|
||||
SystemImpl.khanvas.style.cursor = switch (cursor) {
|
||||
case Default: "default";
|
||||
case Pointer: "pointer";
|
||||
case Text: "text";
|
||||
case EastWestResize: "ew-resize";
|
||||
case NorthSouthResize: "ns-resize";
|
||||
case NorthEastResize: "ne-resize";
|
||||
case SouthEastResize: "se-resize";
|
||||
case NorthWestResize: "nw-resize";
|
||||
case SouthWestResize: "sw-resize";
|
||||
case Grab: "grab";
|
||||
case Grabbing: "grabbing";
|
||||
case NotAllowed: "not-allowed";
|
||||
case Wait: "wait";
|
||||
case Crosshair: "crosshair";
|
||||
case Custom(image):
|
||||
var canvas = js.Browser.document.createCanvasElement();
|
||||
canvas.width = image.width;
|
||||
canvas.height = image.height;
|
||||
if (Std.isOfType(image, WebGLImage)) {
|
||||
canvas.getContext2d().drawImage(cast(image, WebGLImage).image, 0, 0);
|
||||
}
|
||||
else {
|
||||
canvas.getContext2d().drawImage(cast(image, CanvasImage).image, 0, 0);
|
||||
}
|
||||
var dataURL = canvas.toDataURL("image/png");
|
||||
dataURL = StringTools.replace(dataURL, "/^data:image\\/(png|jpg);base64,/", "");
|
||||
|
||||
'url(\'$dataURL\'),auto';
|
||||
default: "default";
|
||||
}
|
||||
}
|
||||
}
|
37
Kha/Backends/HTML5/kha/input/Sensor.hx
Normal file
@ -0,0 +1,37 @@
package kha.input;

import kha.SystemImpl;

class Sensor {
	static var isInited: Bool = false;
	static var accelerometer: Sensor = new Sensor();
	static var gyroscope: Sensor = new Sensor();

	var listeners: Array<Float->Float->Float->Void> = new Array();

	public static function get(type: SensorType): Sensor {
		switch (type) {
			case Accelerometer:
				return accelerometer;
			case Gyroscope:
				return gyroscope;
		}
	}

	public function notify(listener: Float->Float->Float->Void): Void {
		if (!isInited) {
			SystemImpl.initSensor();
			isInited = true;
		}
		listeners.push(listener);
	}

	function new() {}

	public static function _changed(type: Int, x: Float, y: Float, z: Float): Void {
		var sensor = get(type == 0 ? SensorType.Accelerometer : SensorType.Gyroscope);
		for (listener in sensor.listeners) {
			listener(x, y, z);
		}
	}
}
77
Kha/Backends/HTML5/kha/js/AEAudioChannel.hx
Normal file
@ -0,0 +1,77 @@
|
||||
package kha.js;
|
||||
|
||||
import js.html.AudioElement;
|
||||
import kha.audio1.AudioChannel;
|
||||
|
||||
class AEAudioChannel implements kha.audio1.AudioChannel {
|
||||
var element: AudioElement;
|
||||
var stopped = false;
|
||||
var looping: Bool;
|
||||
|
||||
public function new(element: AudioElement, looping: Bool) {
|
||||
this.element = element;
|
||||
this.looping = looping;
|
||||
}
|
||||
|
||||
public function play(): Void {
|
||||
stopped = false;
|
||||
element.play();
|
||||
}
|
||||
|
||||
public function pause(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
}
|
||||
catch (e:Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
public function stop(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
element.currentTime = 0;
|
||||
stopped = true;
|
||||
}
|
||||
catch (e:Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
public var length(get, never): Float; // Seconds
|
||||
|
||||
function get_length(): Float {
|
||||
if (Math.isFinite(element.duration)) {
|
||||
return element.duration;
|
||||
}
|
||||
else {
|
||||
return Math.POSITIVE_INFINITY;
|
||||
}
|
||||
}
|
||||
|
||||
public var position(get, set): Float; // Seconds
|
||||
|
||||
function get_position(): Float {
|
||||
return element.currentTime;
|
||||
}
|
||||
|
||||
function set_position(value: Float): Float {
|
||||
return element.currentTime = value;
|
||||
}
|
||||
|
||||
public var volume(get, set): Float;
|
||||
|
||||
function get_volume(): Float {
|
||||
return element.volume;
|
||||
}
|
||||
|
||||
function set_volume(value: Float): Float {
|
||||
return element.volume = value;
|
||||
}
|
||||
|
||||
public var finished(get, never): Bool;
|
||||
|
||||
function get_finished(): Bool {
|
||||
return stopped || (!looping && position >= length);
|
||||
}
|
||||
}
|
15
Kha/Backends/HTML5/kha/js/AudioElementAudio.hx
Normal file
@ -0,0 +1,15 @@
package kha.js;

@:keep
class AudioElementAudio {
	public static function play(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
		return stream(sound, loop);
	}

	public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
		sound.element.loop = loop;
		var channel = new AEAudioChannel(sound.element, loop);
		channel.play();
		return cast channel;
	}
}
264
Kha/Backends/HTML5/kha/js/CanvasGraphics.hx
Normal file
@ -0,0 +1,264 @@
|
||||
package kha.js;
|
||||
|
||||
import kha.Color;
|
||||
import kha.graphics2.Graphics;
|
||||
import kha.graphics2.ImageScaleQuality;
|
||||
import kha.math.FastMatrix3;
|
||||
import js.html.CanvasRenderingContext2D;
|
||||
|
||||
class CanvasGraphics extends Graphics {
|
||||
var canvas: CanvasRenderingContext2D;
|
||||
var webfont: kha.js.Font;
|
||||
var myColor: Color;
|
||||
var scaleQuality: ImageScaleQuality;
|
||||
var clipping: Bool = false;
|
||||
|
||||
static var instance: CanvasGraphics;
|
||||
|
||||
public function new(canvas: CanvasRenderingContext2D) {
|
||||
super();
|
||||
this.canvas = canvas;
|
||||
instance = this;
|
||||
myColor = Color.fromBytes(0, 0, 0);
|
||||
// webfont = new Font("Arial", new FontStyle(false, false, false), 12);
|
||||
// canvas.globalCompositeOperation = "normal";
|
||||
}
|
||||
|
||||
public static function stringWidth(font: kha.Font, text: String): Float {
|
||||
if (instance == null)
|
||||
return 5 * text.length;
|
||||
else {
|
||||
instance.font = font;
|
||||
return instance.canvas.measureText(text).width;
|
||||
}
|
||||
}
|
||||
|
||||
override public function begin(clear: Bool = true, clearColor: Color = null): Void {
|
||||
if (clear)
|
||||
this.clear(clearColor);
|
||||
}
|
||||
|
||||
override public function clear(color: Color = null): Void {
|
||||
if (color == null)
|
||||
color = 0x00000000;
|
||||
canvas.strokeStyle = "rgba(" + color.Rb + "," + color.Gb + "," + color.Bb + "," + color.A + ")";
|
||||
canvas.fillStyle = "rgba(" + color.Rb + "," + color.Gb + "," + color.Bb + "," + color.A + ")";
|
||||
if (color.A == 0) // if color is transparent, clear the screen. Note: in Canvas, transparent colors will overlay, not overwrite.
|
||||
canvas.clearRect(0, 0, canvas.canvas.width, canvas.canvas.height);
|
||||
else
|
||||
canvas.fillRect(0, 0, canvas.canvas.width, canvas.canvas.height);
|
||||
this.color = myColor;
|
||||
}
|
||||
|
||||
override public function end(): Void {}
|
||||
|
||||
/*override public function translate(x: Float, y: Float) {
|
||||
tx = x;
|
||||
ty = y;
|
||||
}*/
|
||||
override public function drawImage(img: kha.Image, x: Float, y: Float) {
|
||||
canvas.globalAlpha = opacity;
|
||||
canvas.drawImage(cast(img, CanvasImage).image, x, y);
|
||||
canvas.globalAlpha = 1;
|
||||
}
|
||||
|
||||
override public function drawScaledSubImage(image: kha.Image, sx: Float, sy: Float, sw: Float, sh: Float, dx: Float, dy: Float, dw: Float, dh: Float) {
|
||||
canvas.globalAlpha = opacity;
|
||||
try {
|
||||
if (dw < 0 || dh < 0) {
|
||||
canvas.save();
|
||||
canvas.translate(dx, dy);
|
||||
var x = 0.0;
|
||||
var y = 0.0;
|
||||
if (dw < 0) {
|
||||
canvas.scale(-1, 1);
|
||||
x = -dw;
|
||||
}
|
||||
if (dh < 0) {
|
||||
canvas.scale(1, -1);
|
||||
y = -dh;
|
||||
}
|
||||
canvas.drawImage(cast(image, CanvasImage).image, sx, sy, sw, sh, x, y, dw, dh);
|
||||
canvas.restore();
|
||||
}
|
||||
else {
|
||||
canvas.drawImage(cast(image, CanvasImage).image, sx, sy, sw, sh, dx, dy, dw, dh);
|
||||
}
|
||||
}
|
||||
catch (ex:Dynamic) {}
|
||||
canvas.globalAlpha = 1;
|
||||
}
|
||||
|
||||
override function set_color(color: Color): Color {
|
||||
myColor = color;
|
||||
canvas.strokeStyle = "rgba(" + color.Rb + "," + color.Gb + "," + color.Bb + "," + color.A + ")";
|
||||
canvas.fillStyle = "rgba(" + color.Rb + "," + color.Gb + "," + color.Bb + "," + color.A + ")";
|
||||
return color;
|
||||
}
|
||||
|
||||
override function get_color(): Color {
|
||||
return myColor;
|
||||
}
|
||||
|
||||
override function get_imageScaleQuality(): ImageScaleQuality {
|
||||
return scaleQuality;
|
||||
}
|
||||
|
||||
override function set_imageScaleQuality(value: ImageScaleQuality): ImageScaleQuality {
|
||||
if (value == ImageScaleQuality.Low) {
|
||||
untyped canvas.mozImageSmoothingEnabled = false;
|
||||
untyped canvas.webkitImageSmoothingEnabled = false;
|
||||
untyped canvas.msImageSmoothingEnabled = false;
|
||||
canvas.imageSmoothingEnabled = false;
|
||||
}
|
||||
else {
|
||||
untyped canvas.mozImageSmoothingEnabled = true;
|
||||
untyped canvas.webkitImageSmoothingEnabled = true;
|
||||
untyped canvas.msImageSmoothingEnabled = true;
|
||||
canvas.imageSmoothingEnabled = true;
|
||||
}
|
||||
return scaleQuality = value;
|
||||
}
|
||||
|
||||
override public function drawRect(x: Float, y: Float, width: Float, height: Float, strength: Float = 1.0) {
|
||||
canvas.beginPath();
|
||||
var oldStrength = canvas.lineWidth;
|
||||
canvas.lineWidth = Math.round(strength);
|
||||
canvas.rect(x, y, width, height);
|
||||
canvas.stroke();
|
||||
canvas.lineWidth = oldStrength;
|
||||
}
|
||||
|
||||
override public function fillRect(x: Float, y: Float, width: Float, height: Float) {
canvas.globalAlpha = opacity * myColor.A;
canvas.fillRect(x, y, width, height);
canvas.globalAlpha = opacity;
}
|
||||
|
||||
public function drawArc(cx: Float, cy: Float, radius: Float, sAngle: Float, eAngle: Float, strength: Float = 1.0, ccw: Bool = false) {
|
||||
_drawArc(cx, cy, radius, sAngle, eAngle, strength, ccw);
|
||||
}
|
||||
|
||||
public function drawCircle(cx: Float, cy: Float, radius: Float, strength: Float = 1.0) {
|
||||
_drawArc(cx, cy, radius, 0, 2 * Math.PI, strength, false);
|
||||
}
|
||||
|
||||
inline function _drawArc(cx: Float, cy: Float, radius: Float, sAngle: Float, eAngle: Float, strength: Float, ccw: Bool) {
|
||||
canvas.beginPath();
|
||||
var oldStrength = canvas.lineWidth;
|
||||
canvas.lineWidth = Math.round(strength);
|
||||
canvas.arc(cx, cy, radius, sAngle, eAngle, ccw);
|
||||
canvas.stroke();
|
||||
canvas.lineWidth = oldStrength;
|
||||
}
|
||||
|
||||
public function fillArc(cx: Float, cy: Float, radius: Float, sAngle: Float, eAngle: Float, ccw: Bool = false) {
|
||||
canvas.beginPath();
|
||||
canvas.arc(cx, cy, radius, sAngle, eAngle, ccw);
|
||||
canvas.fill();
|
||||
}
|
||||
|
||||
public function fillCircle(cx: Float, cy: Float, radius: Float) {
|
||||
canvas.beginPath();
|
||||
canvas.arc(cx, cy, radius, 0, 2 * Math.PI, false);
|
||||
canvas.fill();
|
||||
}
|
||||
|
||||
var bakedQuadCache = new kha.Kravur.AlignedQuad();
|
||||
|
||||
override public function drawString(text: String, x: Float, y: Float) {
|
||||
// canvas.fillText(text, tx + x, ty + y + webfont.getHeight());
|
||||
// canvas.drawImage(cast(webfont.getTexture(), Image).image, 0, 0, 50, 50, tx + x, ty + y, 50, 50);
|
||||
|
||||
var image = webfont.getImage(fontSize, myColor);
|
||||
if (image.width > 0) {
|
||||
// the image created in getImage() is not immediately usable
|
||||
var xpos = x;
|
||||
var ypos = y;
|
||||
for (i in 0...text.length) {
|
||||
var q = webfont.kravur._get(fontSize).getBakedQuad(bakedQuadCache, kha.graphics2.Graphics.fontGlyphs.indexOf(text.charCodeAt(i)), xpos, ypos);
|
||||
|
||||
if (q != null) {
|
||||
if (q.s1 - q.s0 > 0 && q.t1 - q.t0 > 0 && q.x1 - q.x0 > 0 && q.y1 - q.y0 > 0)
|
||||
canvas.drawImage(image, q.s0 * image.width, q.t0 * image.height, (q.s1 - q.s0) * image.width, (q.t1 - q.t0) * image.height, q.x0,
|
||||
q.y0, q.x1 - q.x0, q.y1 - q.y0);
|
||||
xpos += q.xadvance;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override public function drawCharacters(text: Array<Int>, start: Int, length: Int, x: Float, y: Float): Void {
|
||||
var image = webfont.getImage(fontSize, myColor);
|
||||
if (image.width > 0) {
|
||||
// the image created in getImage() is not immediately usable
|
||||
var xpos = x;
|
||||
var ypos = y;
|
||||
for (i in start...start + length) {
|
||||
var q = webfont.kravur._get(fontSize).getBakedQuad(bakedQuadCache, kha.graphics2.Graphics.fontGlyphs.indexOf(text[i]), xpos, ypos);
|
||||
|
||||
if (q != null) {
|
||||
if (q.s1 - q.s0 > 0 && q.t1 - q.t0 > 0 && q.x1 - q.x0 > 0 && q.y1 - q.y0 > 0)
|
||||
canvas.drawImage(image, q.s0 * image.width, q.t0 * image.height, (q.s1 - q.s0) * image.width, (q.t1 - q.t0) * image.height, q.x0,
|
||||
q.y0, q.x1 - q.x0, q.y1 - q.y0);
|
||||
xpos += q.xadvance;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override function set_font(font: kha.Font): kha.Font {
|
||||
webfont = cast(font, kha.js.Font);
|
||||
// canvas.font = webfont.size + "px " + webfont.name;
|
||||
return cast webfont;
|
||||
}
|
||||
|
||||
override function get_font(): kha.Font {
|
||||
return cast webfont;
|
||||
}
|
||||
|
||||
override public function drawLine(x1: Float, y1: Float, x2: Float, y2: Float, strength: Float = 1.0) {
canvas.beginPath();
var oldWidth = canvas.lineWidth;
canvas.lineWidth = Math.round(strength);
canvas.moveTo(x1, y1);
canvas.lineTo(x2, y2);
canvas.moveTo(0, 0);
canvas.stroke();
canvas.lineWidth = oldWidth;
}
|
||||
|
||||
override public function fillTriangle(x1: Float, y1: Float, x2: Float, y2: Float, x3: Float, y3: Float) {
|
||||
canvas.beginPath();
|
||||
canvas.moveTo(x1, y1);
|
||||
canvas.lineTo(x2, y2);
|
||||
canvas.lineTo(x3, y3);
|
||||
canvas.closePath();
|
||||
canvas.fill();
|
||||
}
|
||||
|
||||
override public function scissor(x: Int, y: Int, width: Int, height: Int): Void {
|
||||
if (!clipping) {
|
||||
canvas.save();
|
||||
clipping = true;
|
||||
}
|
||||
canvas.beginPath();
|
||||
canvas.rect(x, y, width, height);
|
||||
canvas.clip();
|
||||
}
|
||||
|
||||
override public function disableScissor(): Void {
|
||||
if (clipping) {
|
||||
canvas.restore();
|
||||
clipping = false;
|
||||
}
|
||||
}
|
||||
|
||||
override public function drawVideo(video: kha.Video, x: Float, y: Float, width: Float, height: Float): Void {
|
||||
canvas.drawImage(cast(video, Video).element, x, y, width, height);
|
||||
}
|
||||
|
||||
override public function setTransformation(transformation: FastMatrix3): Void {
|
||||
canvas.setTransform(transformation._00, transformation._01, transformation._10, transformation._11, transformation._20, transformation._21);
|
||||
}
|
||||
}
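A minimal usage sketch of the canvas-backed graphics2 API above (illustrative only, not part of this commit; it assumes Kha's usual per-frame render callback and the public kha.Color constants):

// Illustrative sketch: drawing through kha.graphics2 on the HTML5 canvas backend.
// begin()/clear() map to the clear() above, color maps to set_color, and
// fillRect()/drawRect() map to the fill/stroke helpers.
class DrawExample {
	public static function render(framebuffer: kha.Framebuffer): Void {
		var g = framebuffer.g2;
		g.begin(true, kha.Color.fromBytes(32, 32, 32)); // opaque clear color fills the canvas
		g.color = kha.Color.Red;                        // updates fillStyle and strokeStyle
		g.fillRect(10, 10, 100, 50);
		g.drawRect(10, 80, 100, 50, 2);                 // strength is rounded into lineWidth
		g.end();
	}
}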
|
77
Kha/Backends/HTML5/kha/js/Font.hx
Normal file
77
Kha/Backends/HTML5/kha/js/Font.hx
Normal file
@ -0,0 +1,77 @@
|
||||
package kha.js;
|
||||
|
||||
import js.Syntax;
|
||||
import haxe.io.Bytes;
|
||||
import js.Browser;
|
||||
import js.html.ImageElement;
|
||||
import kha.Color;
|
||||
import kha.Kravur;
|
||||
|
||||
@:keepInit
|
||||
class Font implements Resource {
|
||||
public var kravur: Kravur;
|
||||
|
||||
var images: Map<Int, Map<Int, ImageElement>> = new Map();
|
||||
|
||||
public function new(blob: Blob) {
|
||||
this.kravur = Syntax.code("new kha_js_Font.Kravur(blob);");
|
||||
}
|
||||
|
||||
public static function fromBytes(bytes: Bytes): Font {
|
||||
return new Font(Blob.fromBytes(bytes));
|
||||
}
|
||||
|
||||
public function height(fontSize: Int): Float {
|
||||
return kravur._get(fontSize).getHeight();
|
||||
}
|
||||
|
||||
public function width(fontSize: Int, str: String): Float {
|
||||
return kravur._get(fontSize).stringWidth(str);
|
||||
}
|
||||
|
||||
public function widthOfCharacters(fontSize: Int, characters: Array<Int>, start: Int, length: Int): Float {
|
||||
return kravur._get(fontSize).charactersWidth(characters, start, length);
|
||||
}
|
||||
|
||||
public function baseline(fontSize: Int): Float {
|
||||
return kravur._get(fontSize).getBaselinePosition();
|
||||
}
|
||||
|
||||
public function getImage(fontSize: Int, color: Color): ImageElement {
|
||||
var glyphs = kha.graphics2.Graphics.fontGlyphs;
|
||||
var imageIndex = fontSize * 10000 + glyphs.length;
|
||||
if (!images.exists(imageIndex)) {
|
||||
images[imageIndex] = new Map();
|
||||
}
|
||||
if (!images[imageIndex].exists(color.value)) {
|
||||
var kravur = this.kravur._get(fontSize);
|
||||
var canvas = Browser.document.createCanvasElement();
|
||||
canvas.width = kravur.width;
|
||||
canvas.height = kravur.height;
|
||||
var ctx = canvas.getContext("2d");
|
||||
ctx.fillStyle = "black";
|
||||
ctx.fillRect(0, 0, kravur.width, kravur.height);
|
||||
|
||||
var imageData = ctx.getImageData(0, 0, kravur.width, kravur.height);
|
||||
var bytes = cast(kravur.getTexture(), CanvasImage).bytes;
|
||||
for (i in 0...bytes.length) {
|
||||
imageData.data[i * 4 + 0] = color.Rb;
|
||||
imageData.data[i * 4 + 1] = color.Gb;
|
||||
imageData.data[i * 4 + 2] = color.Bb;
|
||||
imageData.data[i * 4 + 3] = bytes.get(i);
|
||||
}
|
||||
ctx.putImageData(imageData, 0, 0);
|
||||
|
||||
var img = Browser.document.createImageElement();
|
||||
img.src = canvas.toDataURL("image/png");
|
||||
images[imageIndex][color.value] = img;
|
||||
return img;
|
||||
}
|
||||
return images[imageIndex][color.value];
|
||||
}
|
||||
|
||||
public function unload(): Void {
|
||||
kravur = null;
|
||||
images = null;
|
||||
}
|
||||
}
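For reference, a short sketch of how the metrics exposed by this Font class are typically queried (illustrative only; Assets.loadFontFromPath is Kha's public loader and "font.ttf" is a hypothetical asset path, neither is part of this diff):

// Illustrative sketch: height/width/baseline resolve through kravur._get(fontSize) above.
class FontMetricsExample {
	public static function main() {
		kha.Assets.loadFontFromPath("font.ttf", function(f: kha.Font) {
			var size = 16;
			trace('height: ${f.height(size)}');
			trace('width of "Kha": ${f.width(size, "Kha")}');
			trace('baseline: ${f.baseline(size)}');
		});
	}
}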
|
32
Kha/Backends/HTML5/kha/js/MobileWebAudio.hx
Normal file
32
Kha/Backends/HTML5/kha/js/MobileWebAudio.hx
Normal file
@ -0,0 +1,32 @@
|
||||
package kha.js;
|
||||
|
||||
import js.Syntax;
|
||||
import js.html.audio.AudioContext;
|
||||
|
||||
@:keep
|
||||
class MobileWebAudio {
|
||||
@:noCompletion public static var _context: AudioContext;
|
||||
|
||||
@:noCompletion public static function _init(): Void {
|
||||
try {
|
||||
_context = new AudioContext();
|
||||
return;
|
||||
}
|
||||
catch (e:Dynamic) {}
|
||||
try {
|
||||
Syntax.code("this._context = new webkitAudioContext();");
|
||||
return;
|
||||
}
|
||||
catch (e:Dynamic) {}
|
||||
}
|
||||
|
||||
public static function play(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
|
||||
var channel = new MobileWebAudioChannel(cast sound, loop);
|
||||
channel.play();
|
||||
return channel;
|
||||
}
|
||||
|
||||
public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
|
||||
return play(sound, loop);
|
||||
}
|
||||
}
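A hedged usage sketch for the mobile Web Audio path above (illustrative only; _init() is normally driven by SystemImpl, "untitled.ogg" is a hypothetical asset path, and MobileWebAudioSound is defined later in this commit):

// Illustrative sketch: decode a sound and play it through MobileWebAudio.
class MobileAudioExample {
	static function onLoaded(sound: kha.Sound): Void {
		var channel = kha.js.MobileWebAudio.play(sound, true); // loop
		channel.volume = 0.5;
	}

	public static function main() {
		kha.js.MobileWebAudio._init();
		new kha.js.MobileWebAudioSound("untitled.ogg", onLoaded, function(err) {
			trace(err.url);
		});
	}
}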
|
105
Kha/Backends/HTML5/kha/js/MobileWebAudioChannel.hx
Normal file
105
Kha/Backends/HTML5/kha/js/MobileWebAudioChannel.hx
Normal file
@ -0,0 +1,105 @@
|
||||
package kha.js;
|
||||
|
||||
import js.html.audio.AudioBuffer;
|
||||
import js.html.audio.AudioBufferSourceNode;
|
||||
import js.html.audio.GainNode;
|
||||
|
||||
class MobileWebAudioChannel implements kha.audio1.AudioChannel {
|
||||
var buffer: AudioBuffer;
|
||||
var loop: Bool;
|
||||
var source: AudioBufferSourceNode;
|
||||
var gain: GainNode;
|
||||
var startTime: Float;
|
||||
var pauseTime: Float;
|
||||
var paused: Bool = false;
|
||||
var stopped: Bool = false;
|
||||
|
||||
public function new(sound: MobileWebAudioSound, loop: Bool) {
|
||||
this.buffer = sound._buffer;
|
||||
this.loop = loop;
|
||||
createSource();
|
||||
}
|
||||
|
||||
function createSource(): Void {
|
||||
source = MobileWebAudio._context.createBufferSource();
|
||||
source.loop = loop;
|
||||
source.buffer = buffer;
|
||||
source.onended = function() {
|
||||
stopped = true;
|
||||
}
|
||||
gain = MobileWebAudio._context.createGain();
|
||||
source.connect(gain);
|
||||
gain.connect(MobileWebAudio._context.destination);
|
||||
}
|
||||
|
||||
public function play(): Void {
|
||||
if (paused || stopped) {
|
||||
createSource();
|
||||
}
|
||||
stopped = false;
|
||||
if (paused) {
|
||||
paused = false;
|
||||
startTime = MobileWebAudio._context.currentTime - pauseTime;
|
||||
source.start(0, pauseTime);
|
||||
}
|
||||
else {
|
||||
startTime = MobileWebAudio._context.currentTime;
|
||||
source.start();
|
||||
}
|
||||
}
|
||||
|
||||
public function pause(): Void {
|
||||
final wasStopped = paused || stopped;
|
||||
pauseTime = MobileWebAudio._context.currentTime - startTime;
|
||||
paused = true;
|
||||
if (wasStopped)
|
||||
return;
|
||||
source.stop();
|
||||
}
|
||||
|
||||
public function stop(): Void {
|
||||
final wasStopped = paused || stopped;
|
||||
paused = false;
|
||||
stopped = true;
|
||||
if (wasStopped)
|
||||
return;
|
||||
source.stop();
|
||||
}
|
||||
|
||||
public var length(get, never): Float; // Seconds
|
||||
|
||||
function get_length(): Float {
|
||||
return source.buffer.duration;
|
||||
}
|
||||
|
||||
public var position(get, set): Float; // Seconds
|
||||
|
||||
function get_position(): Float {
|
||||
if (stopped)
|
||||
return length;
|
||||
if (paused)
|
||||
return pauseTime;
|
||||
else
|
||||
return MobileWebAudio._context.currentTime - startTime;
|
||||
}
|
||||
|
||||
function set_position(value: Float): Float {
|
||||
return value;
|
||||
}
|
||||
|
||||
public var volume(get, set): Float;
|
||||
|
||||
function get_volume(): Float {
|
||||
return gain.gain.value;
|
||||
}
|
||||
|
||||
function set_volume(value: Float): Float {
|
||||
return gain.gain.value = value;
|
||||
}
|
||||
|
||||
public var finished(get, never): Bool;
|
||||
|
||||
function get_finished(): Bool {
|
||||
return stopped;
|
||||
}
|
||||
}
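The pause/resume bookkeeping above keeps the position as context.currentTime - startTime while playing and freezes it in pauseTime while paused; a small sketch of that arithmetic (illustrative only, with made-up numbers):

// Illustrative sketch of MobileWebAudioChannel's timing math.
class ChannelTimingExample {
	public static function main() {
		var currentTime = 10.0;                     // stand-in for AudioContext.currentTime
		var startTime = 7.5;                        // recorded when play() started the source
		var pauseTime = currentTime - startTime;    // 2.5 s into the sound at pause()
		var resumedStart = currentTime - pauseTime; // startTime after resuming via source.start(0, pauseTime)
		trace('paused at ${pauseTime}s, resumed startTime = ${resumedStart}');
	}
}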
|
37
Kha/Backends/HTML5/kha/js/MobileWebAudioSound.hx
Normal file
37
Kha/Backends/HTML5/kha/js/MobileWebAudioSound.hx
Normal file
@ -0,0 +1,37 @@
|
||||
package kha.js;
|
||||
|
||||
import haxe.io.Bytes;
|
||||
import js.html.XMLHttpRequest;
|
||||
|
||||
class MobileWebAudioSound extends kha.Sound {
|
||||
public var _buffer: Dynamic;
|
||||
|
||||
public function new(filename: String, done: kha.Sound->Void, failed: AssetError->Void) {
|
||||
super();
|
||||
var request = untyped new XMLHttpRequest();
|
||||
request.open("GET", filename, true);
|
||||
request.responseType = "arraybuffer";
|
||||
|
||||
request.onerror = function() {
|
||||
failed({url: filename});
|
||||
};
|
||||
|
||||
request.onload = function() {
|
||||
compressedData = Bytes.ofData(request.response);
|
||||
uncompressedData = null;
|
||||
MobileWebAudio._context.decodeAudioData(compressedData.getData(), function(buffer) {
|
||||
length = buffer.duration;
|
||||
channels = buffer.numberOfChannels;
|
||||
_buffer = buffer;
|
||||
done(this);
|
||||
}, function() {
|
||||
failed({url: filename, error: "Audio format not supported"});
|
||||
});
|
||||
};
|
||||
request.send(null);
|
||||
}
|
||||
|
||||
override public function uncompress(done: Void->Void): Void {
|
||||
done();
|
||||
}
|
||||
}
|
132
Kha/Backends/HTML5/kha/js/Sound.hx
Normal file
132
Kha/Backends/HTML5/kha/js/Sound.hx
Normal file
@ -0,0 +1,132 @@
|
||||
package kha.js;
|
||||
|
||||
import js.Browser;
|
||||
import js.html.AudioElement;
|
||||
import js.html.ErrorEvent;
|
||||
import js.html.Event;
|
||||
import js.html.MediaError;
|
||||
|
||||
using StringTools;
|
||||
|
||||
/*class SoundChannel extends kha.SoundChannel {
|
||||
private var element: Dynamic;
|
||||
|
||||
public function new(element: Dynamic) {
|
||||
super();
|
||||
this.element = element;
|
||||
}
|
||||
|
||||
override public function play(): Void {
|
||||
super.play();
|
||||
element.play();
|
||||
}
|
||||
|
||||
override public function pause(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
}
|
||||
catch (e: Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
override public function stop(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
element.currentTime = 0;
|
||||
super.stop();
|
||||
}
|
||||
catch (e: Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
override public function getCurrentPos(): Int {
|
||||
return Math.ceil(element.currentTime * 1000); // Milliseconds
|
||||
}
|
||||
|
||||
override public function getLength(): Int {
|
||||
if (Math.isFinite(element.duration)) {
|
||||
return Math.floor(element.duration * 1000); // Milliseconds
|
||||
}
|
||||
else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
}*/
|
||||
class Sound extends kha.Sound {
|
||||
var filenames: Array<String>;
|
||||
|
||||
static var loading: Array<Sound> = new Array();
|
||||
|
||||
var done: kha.Sound->Void;
|
||||
var failed: AssetError->Void;
|
||||
|
||||
public var element: AudioElement;
|
||||
|
||||
public function new(filenames: Array<String>, done: kha.Sound->Void, failed: AssetError->Void) {
|
||||
super();
|
||||
|
||||
this.done = done;
|
||||
this.failed = failed;
|
||||
loading.push(this); // prevent gc from removing this
|
||||
|
||||
element = Browser.document.createAudioElement();
|
||||
|
||||
this.filenames = [];
|
||||
for (filename in filenames) {
|
||||
if (element.canPlayType("audio/ogg") != "" && filename.endsWith(".ogg"))
|
||||
this.filenames.push(filename);
|
||||
if (element.canPlayType("audio/mp4") != "" && filename.endsWith(".mp4"))
|
||||
this.filenames.push(filename);
|
||||
if (element.canPlayType("audio/wav") != "" && filename.endsWith(".wav"))
|
||||
this.filenames.push(filename);
|
||||
}
|
||||
|
||||
element.addEventListener("error", errorListener, false);
|
||||
element.addEventListener("canplay", canPlayThroughListener, false);
|
||||
|
||||
element.src = this.filenames[0];
|
||||
element.preload = "auto";
|
||||
element.load();
|
||||
}
|
||||
|
||||
// override public function play(): kha.SoundChannel {
|
||||
// try {
|
||||
// element.play();
|
||||
// }
|
||||
// catch (e: Dynamic) {
|
||||
// trace(e);
|
||||
// }
|
||||
// return new SoundChannel(element);
|
||||
// }
|
||||
function errorListener(eventInfo: ErrorEvent): Void {
|
||||
if (element.error.code == MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED) {
|
||||
for (i in 0...filenames.length - 1) {
|
||||
if (element.src == filenames[i]) {
|
||||
// try loading with next extension:
|
||||
element.src = filenames[i + 1];
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
failed({url: element.src});
|
||||
finishAsset();
|
||||
}
|
||||
|
||||
function canPlayThroughListener(eventInfo: Event): Void {
|
||||
finishAsset();
|
||||
}
|
||||
|
||||
function finishAsset() {
|
||||
element.removeEventListener("error", errorListener, false);
|
||||
element.removeEventListener("canplay", canPlayThroughListener, false);
|
||||
done(this);
|
||||
loading.remove(this);
|
||||
}
|
||||
|
||||
override public function uncompress(done: Void->Void): Void {
|
||||
done();
|
||||
}
|
||||
}
|
147
Kha/Backends/HTML5/kha/js/Video.hx
Normal file
147
Kha/Backends/HTML5/kha/js/Video.hx
Normal file
@ -0,0 +1,147 @@
|
||||
package kha.js;
|
||||
|
||||
import js.Browser;
|
||||
import js.html.ErrorEvent;
|
||||
import js.html.Event;
|
||||
import js.html.MediaError;
|
||||
import js.html.VideoElement;
|
||||
|
||||
using StringTools;
|
||||
|
||||
class Video extends kha.Video {
|
||||
public var element: VideoElement;
|
||||
public var texture: Image;
|
||||
|
||||
var filenames: Array<String>;
|
||||
var done: kha.Video->Void;
|
||||
|
||||
function new() {
|
||||
super();
|
||||
}
|
||||
|
||||
public static function fromElement(element: js.html.VideoElement): Video {
|
||||
var video = new Video();
|
||||
video.element = element;
|
||||
if (SystemImpl.gl != null)
|
||||
video.texture = Image.fromVideo(video);
|
||||
return video;
|
||||
}
|
||||
|
||||
public static function fromFile(filenames: Array<String>, done: kha.Video->Void): Void {
|
||||
var video = new Video();
|
||||
|
||||
video.done = done;
|
||||
|
||||
video.element = cast Browser.document.createElement("video");
|
||||
|
||||
video.filenames = [];
|
||||
for (filename in filenames) {
|
||||
if (video.element.canPlayType("video/webm") != "" && filename.endsWith(".webm"))
|
||||
video.filenames.push(filename);
|
||||
#if !kha_debug_html5
|
||||
if (video.element.canPlayType("video/mp4") != "" && filename.endsWith(".mp4"))
|
||||
video.filenames.push(filename);
|
||||
#end
|
||||
}
|
||||
|
||||
video.element.addEventListener("error", video.errorListener, false);
|
||||
video.element.addEventListener("canplaythrough", video.canPlayThroughListener, false);
|
||||
|
||||
video.element.preload = "auto";
|
||||
video.element.src = video.filenames[0];
|
||||
}
|
||||
|
||||
override public function width(): Int {
|
||||
return element.videoWidth;
|
||||
}
|
||||
|
||||
override public function height(): Int {
|
||||
return element.videoHeight;
|
||||
}
|
||||
|
||||
override public function play(loop: Bool = false): Void {
|
||||
try {
|
||||
element.loop = loop;
|
||||
element.play();
|
||||
}
|
||||
catch (e:Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
override public function pause(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
}
|
||||
catch (e:Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
override public function stop(): Void {
|
||||
try {
|
||||
element.pause();
|
||||
element.currentTime = 0;
|
||||
}
|
||||
catch (e:Dynamic) {
|
||||
trace(e);
|
||||
}
|
||||
}
|
||||
|
||||
override public function getCurrentPos(): Int {
|
||||
return Math.ceil(element.currentTime * 1000); // Milliseconds
|
||||
}
|
||||
|
||||
override function get_position(): Int {
|
||||
return Math.ceil(element.currentTime * 1000);
|
||||
}
|
||||
|
||||
override function set_position(value: Int): Int {
|
||||
element.currentTime = value / 1000;
|
||||
return value;
|
||||
}
|
||||
|
||||
override public function getVolume(): Float {
|
||||
return element.volume;
|
||||
}
|
||||
|
||||
override public function setVolume(volume: Float): Void {
|
||||
element.volume = volume;
|
||||
}
|
||||
|
||||
override public function getLength(): Int {
|
||||
if (Math.isFinite(element.duration)) {
|
||||
return Math.floor(element.duration * 1000); // Milliseconds
|
||||
}
|
||||
else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
function errorListener(eventInfo: ErrorEvent): Void {
|
||||
if (element.error.code == MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED) {
|
||||
for (i in 0...filenames.length - 1) {
|
||||
if (element.src == filenames[i]) {
|
||||
// try loading with next extension:
|
||||
element.src = filenames[i + 1];
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trace("Error loading " + element.src);
|
||||
finishAsset();
|
||||
}
|
||||
|
||||
function canPlayThroughListener(eventInfo: Event): Void {
|
||||
finishAsset();
|
||||
}
|
||||
|
||||
function finishAsset() {
|
||||
element.removeEventListener("error", errorListener, false);
|
||||
element.removeEventListener("canplaythrough", canPlayThroughListener, false);
|
||||
if (SystemImpl.gl != null)
|
||||
texture = Image.fromVideo(this);
|
||||
done(this);
|
||||
}
|
||||
}
|
114
Kha/Backends/HTML5/kha/js/WebAudioSound.hx
Normal file
114
Kha/Backends/HTML5/kha/js/WebAudioSound.hx
Normal file
@ -0,0 +1,114 @@
|
||||
package kha.js;
|
||||
|
||||
import haxe.io.Bytes;
|
||||
import js.Browser;
|
||||
import js.html.XMLHttpRequest;
|
||||
import kha.audio2.Audio;
|
||||
|
||||
/*
|
||||
class WebAudioChannel extends kha.SoundChannel {
|
||||
private var buffer: Dynamic;
|
||||
private var startTime: Float;
|
||||
private var offset: Float;
|
||||
private var source: Dynamic;
|
||||
|
||||
public function new(buffer: Dynamic) {
|
||||
super();
|
||||
this.offset = 0;
|
||||
this.buffer = buffer;
|
||||
this.startTime = Audio._context.currentTime;
|
||||
this.source = Audio._context.createBufferSource();
|
||||
this.source.buffer = this.buffer;
|
||||
this.source.connect(Audio._context.destination);
|
||||
this.source.start(0);
|
||||
}
|
||||
|
||||
override public function play(): Void {
|
||||
if (source != null) return;
|
||||
super.play();
|
||||
startTime = Audio._context.currentTime - offset;
|
||||
source.start(0, offset);
|
||||
}
|
||||
|
||||
override public function pause(): Void {
|
||||
source.stop();
|
||||
offset = Audio._context.currentTime - startTime;
|
||||
startTime = -1;
|
||||
source = null;
|
||||
}
|
||||
|
||||
override public function stop(): Void {
|
||||
source.stop();
|
||||
source = null;
|
||||
offset = 0;
|
||||
startTime = -1;
|
||||
super.stop();
|
||||
}
|
||||
|
||||
override public function getCurrentPos(): Int {
|
||||
if (startTime < 0) return Math.ceil(offset * 1000);
|
||||
else return Math.ceil((Audio._context.currentTime - startTime) * 1000); // Milliseconds
|
||||
}
|
||||
|
||||
override public function getLength(): Int {
|
||||
return Math.floor(buffer.duration * 1000); // Milliseconds
|
||||
}
|
||||
}
|
||||
*/
|
||||
class WebAudioSound extends kha.Sound {
|
||||
public function new(filename: String, done: kha.Sound->Void, failed: AssetError->Void) {
|
||||
super();
|
||||
var request = untyped new XMLHttpRequest();
|
||||
request.open("GET", filename, true);
|
||||
request.responseType = "arraybuffer";
|
||||
|
||||
request.onerror = function() {
|
||||
failed({url: filename});
|
||||
};
|
||||
|
||||
request.onload = function() {
|
||||
compressedData = Bytes.ofData(request.response);
|
||||
uncompressedData = null;
|
||||
done(this);
|
||||
};
|
||||
request.send(null);
|
||||
}
|
||||
|
||||
function superUncompress(done: Void->Void): Void {
|
||||
super.uncompress(done);
|
||||
}
|
||||
|
||||
override public function uncompress(done: Void->Void): Void {
|
||||
Audio._context.decodeAudioData(compressedData.getData(), function(buffer: js.html.audio.AudioBuffer) {
|
||||
final ch0 = buffer.getChannelData(0);
|
||||
final ch1 = buffer.numberOfChannels == 1 ? ch0 : buffer.getChannelData(1);
|
||||
final len = ch0.length;
|
||||
uncompressedData = new kha.arrays.Float32Array(len * 2);
|
||||
length = buffer.duration;
|
||||
channels = buffer.numberOfChannels;
|
||||
sampleRate = Math.round(buffer.sampleRate);
|
||||
var idx = 0;
|
||||
var i = 0;
|
||||
final lidx = len * 2;
|
||||
function uncompressInner() {
|
||||
var chk_len = idx + 11025;
|
||||
var next_chk = chk_len > lidx ? lidx : chk_len;
|
||||
while (idx < next_chk) {
|
||||
uncompressedData[idx] = ch0[i];
|
||||
uncompressedData[idx + 1] = ch1[i];
|
||||
idx += 2;
|
||||
++i;
|
||||
}
|
||||
if (idx < lidx)
|
||||
js.Browser.window.setTimeout(uncompressInner, 0);
|
||||
else {
|
||||
compressedData = null;
|
||||
done();
|
||||
}
|
||||
};
|
||||
uncompressInner();
|
||||
}, function() {
|
||||
superUncompress(done);
|
||||
});
|
||||
}
|
||||
}
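The uncompress() callback above interleaves the decoded left/right channel data into one flat Float32Array, processing 11025 samples per setTimeout slice so decoding does not block the main thread. A short sketch of the resulting sample layout (illustrative only):

// Illustrative sketch: stereo sample i lands at [2*i] (left) and [2*i + 1] (right);
// mono sources reuse channel 0 for both slots.
class InterleaveExample {
	public static function main() {
		var left = [0.1, 0.2, 0.3];
		var right = [0.4, 0.5, 0.6];
		var interleaved = new kha.arrays.Float32Array(left.length * 2);
		for (i in 0...left.length) {
			interleaved[i * 2] = left[i];
			interleaved[i * 2 + 1] = right[i];
		}
		trace(interleaved[2]); // 0.2, the second left sample
	}
}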
|
11
Kha/Backends/HTML5/kha/js/graphics4/ConstantLocation.hx
Normal file
11
Kha/Backends/HTML5/kha/js/graphics4/ConstantLocation.hx
Normal file
@ -0,0 +1,11 @@
|
||||
package kha.js.graphics4;
|
||||
|
||||
class ConstantLocation implements kha.graphics4.ConstantLocation {
|
||||
public var value: Dynamic;
|
||||
public var type: Int;
|
||||
|
||||
public function new(value: Dynamic, type: Int) {
|
||||
this.value = value;
|
||||
this.type = type;
|
||||
}
|
||||
}
|
738
Kha/Backends/HTML5/kha/js/graphics4/Graphics.hx
Normal file
738
Kha/Backends/HTML5/kha/js/graphics4/Graphics.hx
Normal file
@ -0,0 +1,738 @@
|
||||
package kha.js.graphics4;
|
||||
|
||||
import js.html.webgl.GL2;
|
||||
import kha.graphics4.StencilValue;
|
||||
import kha.arrays.Float32Array;
|
||||
import kha.arrays.Int32Array;
|
||||
import js.html.webgl.GL;
|
||||
import kha.graphics4.BlendingFactor;
|
||||
import kha.graphics4.BlendingOperation;
|
||||
import kha.graphics4.CompareMode;
|
||||
import kha.graphics4.CubeMap;
|
||||
import kha.graphics4.CullMode;
|
||||
import kha.graphics4.IndexBuffer;
|
||||
import kha.graphics4.MipMapFilter;
|
||||
import kha.graphics4.PipelineState;
|
||||
import kha.graphics4.StencilAction;
|
||||
import kha.graphics4.TextureAddressing;
|
||||
import kha.graphics4.TextureFilter;
|
||||
import kha.graphics4.Usage;
|
||||
import kha.graphics4.VertexBuffer;
|
||||
import kha.graphics4.VertexStructure;
|
||||
import kha.Image;
|
||||
import kha.math.FastMatrix3;
|
||||
import kha.math.FastMatrix4;
|
||||
import kha.math.FastVector2;
|
||||
import kha.math.FastVector3;
|
||||
import kha.math.FastVector4;
|
||||
import kha.WebGLImage;
|
||||
|
||||
class Graphics implements kha.graphics4.Graphics {
|
||||
var currentPipeline: PipelineState = null;
|
||||
var depthTest: Bool = false;
|
||||
var depthMask: Bool = false;
|
||||
var colorMaskRed: Bool = true;
|
||||
var colorMaskGreen: Bool = true;
|
||||
var colorMaskBlue: Bool = true;
|
||||
var colorMaskAlpha: Bool = true;
|
||||
var indicesCount: Int;
|
||||
var renderTarget: Canvas;
|
||||
var renderTargetFrameBuffer: Dynamic;
|
||||
var renderTargetMSAA: Dynamic;
|
||||
var renderTargetTexture: Dynamic;
|
||||
var isCubeMap: Bool = false;
|
||||
var isDepthAttachment: Bool = false;
|
||||
var instancedExtension: Dynamic;
|
||||
var blendMinMaxExtension: Dynamic;
|
||||
|
||||
static var current: Graphics = null;
|
||||
static var useVertexAttributes: Int = 0;
|
||||
|
||||
public function new(renderTarget: Canvas = null) {
|
||||
this.renderTarget = renderTarget;
|
||||
init();
|
||||
if (SystemImpl.gl2) {
|
||||
instancedExtension = true;
|
||||
}
|
||||
else {
|
||||
instancedExtension = SystemImpl.gl.getExtension("ANGLE_instanced_arrays");
|
||||
blendMinMaxExtension = SystemImpl.gl.getExtension("EXT_blend_minmax");
|
||||
}
|
||||
}
|
||||
|
||||
function init() {
|
||||
if (renderTarget == null)
|
||||
return;
|
||||
isCubeMap = Std.isOfType(renderTarget, CubeMap);
|
||||
if (isCubeMap) {
|
||||
var cubeMap: CubeMap = cast(renderTarget, CubeMap);
|
||||
renderTargetFrameBuffer = cubeMap.frameBuffer;
|
||||
renderTargetTexture = cubeMap.texture;
|
||||
isDepthAttachment = cubeMap.isDepthAttachment;
|
||||
}
|
||||
else {
|
||||
var image: WebGLImage = cast(renderTarget, WebGLImage);
|
||||
renderTargetFrameBuffer = image.frameBuffer;
|
||||
renderTargetMSAA = image.MSAAFrameBuffer;
|
||||
renderTargetTexture = image.texture;
|
||||
}
|
||||
}
|
||||
|
||||
public function begin(additionalRenderTargets: Array<Canvas> = null): Void {
|
||||
if (current == null) {
|
||||
current = this;
|
||||
}
|
||||
else {
|
||||
throw "End before you begin";
|
||||
}
|
||||
|
||||
SystemImpl.gl.enable(GL.BLEND);
|
||||
SystemImpl.gl.blendFunc(GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA);
|
||||
if (renderTarget == null) {
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
SystemImpl.gl.viewport(0, 0, System.windowWidth(), System.windowHeight());
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, renderTargetFrameBuffer);
|
||||
// if (isCubeMap) SystemImpl.gl.framebufferTexture(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_CUBE_MAP, renderTargetTexture, 0); // Layered
|
||||
SystemImpl.gl.viewport(0, 0, renderTarget.width, renderTarget.height);
|
||||
if (additionalRenderTargets != null) {
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, SystemImpl.drawBuffers.COLOR_ATTACHMENT0_WEBGL, GL.TEXTURE_2D, renderTargetTexture, 0);
|
||||
for (i in 0...additionalRenderTargets.length) {
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, SystemImpl.drawBuffers.COLOR_ATTACHMENT0_WEBGL + i + 1, GL.TEXTURE_2D,
|
||||
cast(additionalRenderTargets[i], WebGLImage).texture, 0);
|
||||
}
|
||||
var attachments = [SystemImpl.drawBuffers.COLOR_ATTACHMENT0_WEBGL];
|
||||
for (i in 0...additionalRenderTargets.length) {
|
||||
attachments.push(SystemImpl.drawBuffers.COLOR_ATTACHMENT0_WEBGL + i + 1);
|
||||
}
|
||||
SystemImpl.gl2 ? untyped SystemImpl.gl.drawBuffers(attachments) : SystemImpl.drawBuffers.drawBuffersWEBGL(attachments);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public function beginFace(face: Int): Void {
|
||||
if (current == null) {
|
||||
current = this;
|
||||
}
|
||||
else {
|
||||
throw "End before you begin";
|
||||
}
|
||||
|
||||
SystemImpl.gl.enable(GL.BLEND);
|
||||
SystemImpl.gl.blendFunc(GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, renderTargetFrameBuffer);
|
||||
SystemImpl.gl.framebufferTexture2D(GL.FRAMEBUFFER, isDepthAttachment ? GL.DEPTH_ATTACHMENT : GL.COLOR_ATTACHMENT0,
|
||||
GL.TEXTURE_CUBE_MAP_POSITIVE_X + face, renderTargetTexture, 0);
|
||||
SystemImpl.gl.viewport(0, 0, renderTarget.width, renderTarget.height);
|
||||
}
|
||||
|
||||
public function beginEye(eye: Int): Void {
|
||||
if (current == null) {
|
||||
current = this;
|
||||
}
|
||||
else {
|
||||
throw "End before you begin";
|
||||
}
|
||||
|
||||
SystemImpl.gl.enable(GL.BLEND);
|
||||
SystemImpl.gl.blendFunc(GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA);
|
||||
SystemImpl.gl.bindFramebuffer(GL.FRAMEBUFFER, null);
|
||||
if (eye == 0) {
|
||||
SystemImpl.gl.viewport(0, 0, Std.int(System.windowWidth() * 0.5), System.windowHeight());
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.viewport(Std.int(System.windowWidth() * 0.5), 0, Std.int(System.windowWidth() * 0.5), System.windowHeight());
|
||||
}
|
||||
}
|
||||
|
||||
public function end(): Void {
|
||||
if (current == this) {
|
||||
current = null;
|
||||
}
|
||||
else {
|
||||
throw "Begin before you end";
|
||||
}
|
||||
|
||||
if (renderTargetMSAA != null) {
|
||||
untyped SystemImpl.gl.bindFramebuffer(SystemImpl.gl.READ_FRAMEBUFFER, renderTargetFrameBuffer);
|
||||
untyped SystemImpl.gl.bindFramebuffer(SystemImpl.gl.DRAW_FRAMEBUFFER, renderTargetMSAA);
|
||||
untyped SystemImpl.gl.blitFramebuffer(0, 0, renderTarget.width, renderTarget.height, 0, 0, renderTarget.width, renderTarget.height,
|
||||
GL.COLOR_BUFFER_BIT, GL.NEAREST);
|
||||
}
|
||||
#if (debug || kha_debug_html5)
|
||||
var error = SystemImpl.gl.getError();
|
||||
switch (error) {
|
||||
case GL.NO_ERROR:
|
||||
|
||||
case GL.INVALID_ENUM:
|
||||
trace("WebGL error: Invalid enum");
|
||||
case GL.INVALID_VALUE:
|
||||
trace("WebGL error: Invalid value");
|
||||
case GL.INVALID_OPERATION:
|
||||
trace("WebGL error: Invalid operation");
|
||||
case GL.INVALID_FRAMEBUFFER_OPERATION:
|
||||
trace("WebGL error: Invalid framebuffer operation");
|
||||
case GL.OUT_OF_MEMORY:
|
||||
trace("WebGL error: Out of memory");
|
||||
case GL.CONTEXT_LOST_WEBGL:
|
||||
trace("WebGL error: Context lost");
|
||||
default:
|
||||
trace("Unknown WebGL error");
|
||||
}
|
||||
#end
|
||||
}
|
||||
|
||||
public function flush(): Void {}
|
||||
|
||||
public function vsynced(): Bool {
|
||||
return true;
|
||||
}
|
||||
|
||||
public function refreshRate(): Int {
|
||||
return 60;
|
||||
}
|
||||
|
||||
public function clear(?color: Color, ?depth: Float, ?stencil: Int): Void {
|
||||
var clearMask: Int = 0;
|
||||
if (color != null) {
|
||||
clearMask |= GL.COLOR_BUFFER_BIT;
|
||||
SystemImpl.gl.colorMask(true, true, true, true);
|
||||
SystemImpl.gl.clearColor(color.R, color.G, color.B, color.A);
|
||||
}
|
||||
if (depth != null) {
|
||||
clearMask |= GL.DEPTH_BUFFER_BIT;
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
SystemImpl.gl.depthMask(true);
|
||||
SystemImpl.gl.clearDepth(depth);
|
||||
}
|
||||
if (stencil != null) {
|
||||
clearMask |= GL.STENCIL_BUFFER_BIT;
|
||||
SystemImpl.gl.enable(GL.STENCIL_TEST);
|
||||
SystemImpl.gl.stencilMask(0xff);
|
||||
SystemImpl.gl.clearStencil(stencil);
|
||||
}
|
||||
SystemImpl.gl.clear(clearMask);
|
||||
SystemImpl.gl.colorMask(colorMaskRed, colorMaskGreen, colorMaskBlue, colorMaskAlpha);
|
||||
if (depthTest) {
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.disable(GL.DEPTH_TEST);
|
||||
}
|
||||
SystemImpl.gl.depthMask(depthMask);
|
||||
}
|
||||
|
||||
public function viewport(x: Int, y: Int, width: Int, height: Int): Void {
|
||||
if (renderTarget == null) {
|
||||
SystemImpl.gl.viewport(x, System.windowHeight(0) - y - height, width, height);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.viewport(x, y, width, height);
|
||||
}
|
||||
}
|
||||
|
||||
public function scissor(x: Int, y: Int, width: Int, height: Int): Void {
|
||||
SystemImpl.gl.enable(GL.SCISSOR_TEST);
|
||||
if (renderTarget == null) {
|
||||
SystemImpl.gl.scissor(x, System.windowHeight(0) - y - height, width, height);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.scissor(x, y, width, height);
|
||||
}
|
||||
}
|
||||
|
||||
public function disableScissor(): Void {
|
||||
SystemImpl.gl.disable(GL.SCISSOR_TEST);
|
||||
}
|
||||
|
||||
public function setDepthMode(write: Bool, mode: CompareMode): Void {
|
||||
switch (mode) {
|
||||
case Always:
|
||||
write ? SystemImpl.gl.enable(GL.DEPTH_TEST) : SystemImpl.gl.disable(GL.DEPTH_TEST);
|
||||
depthTest = write;
|
||||
SystemImpl.gl.depthFunc(GL.ALWAYS);
|
||||
case Never:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.NEVER);
|
||||
case Equal:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.EQUAL);
|
||||
case NotEqual:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.NOTEQUAL);
|
||||
case Less:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.LESS);
|
||||
case LessEqual:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.LEQUAL);
|
||||
case Greater:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.GREATER);
|
||||
case GreaterEqual:
|
||||
SystemImpl.gl.enable(GL.DEPTH_TEST);
|
||||
depthTest = true;
|
||||
SystemImpl.gl.depthFunc(GL.GEQUAL);
|
||||
}
|
||||
SystemImpl.gl.depthMask(write);
|
||||
depthMask = write;
|
||||
}
|
||||
|
||||
static function getBlendFunc(factor: BlendingFactor): Int {
|
||||
switch (factor) {
|
||||
case BlendZero, Undefined:
|
||||
return GL.ZERO;
|
||||
case BlendOne:
|
||||
return GL.ONE;
|
||||
case SourceAlpha:
|
||||
return GL.SRC_ALPHA;
|
||||
case DestinationAlpha:
|
||||
return GL.DST_ALPHA;
|
||||
case InverseSourceAlpha:
|
||||
return GL.ONE_MINUS_SRC_ALPHA;
|
||||
case InverseDestinationAlpha:
|
||||
return GL.ONE_MINUS_DST_ALPHA;
|
||||
case SourceColor:
|
||||
return GL.SRC_COLOR;
|
||||
case DestinationColor:
|
||||
return GL.DST_COLOR;
|
||||
case InverseSourceColor:
|
||||
return GL.ONE_MINUS_SRC_COLOR;
|
||||
case InverseDestinationColor:
|
||||
return GL.ONE_MINUS_DST_COLOR;
|
||||
}
|
||||
}
|
||||
|
||||
static function getBlendOp(op: BlendingOperation): Int {
|
||||
switch (op) {
|
||||
case Add:
|
||||
return GL.FUNC_ADD;
|
||||
case Subtract:
|
||||
return GL.FUNC_SUBTRACT;
|
||||
case ReverseSubtract:
|
||||
return GL.FUNC_REVERSE_SUBTRACT;
|
||||
case Min:
|
||||
return 0x8007;
|
||||
case Max:
|
||||
return 0x8008;
|
||||
}
|
||||
}
|
||||
|
||||
public function setBlendingMode(source: BlendingFactor, destination: BlendingFactor, operation: BlendingOperation, alphaSource: BlendingFactor,
|
||||
alphaDestination: BlendingFactor, alphaOperation: BlendingOperation): Void {
|
||||
if (source == BlendOne && destination == BlendZero) {
|
||||
SystemImpl.gl.disable(GL.BLEND);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.enable(GL.BLEND);
|
||||
SystemImpl.gl.blendFuncSeparate(getBlendFunc(source), getBlendFunc(destination), getBlendFunc(alphaSource), getBlendFunc(alphaDestination));
|
||||
SystemImpl.gl.blendEquationSeparate(getBlendOp(operation), getBlendOp(alphaOperation));
|
||||
}
|
||||
}
|
||||
|
||||
public function createVertexBuffer(vertexCount: Int, structure: VertexStructure, usage: Usage, canRead: Bool = false): kha.graphics4.VertexBuffer {
|
||||
return new VertexBuffer(vertexCount, structure, usage);
|
||||
}
|
||||
|
||||
public function setVertexBuffer(vertexBuffer: kha.graphics4.VertexBuffer): Void {
|
||||
for (i in 0...useVertexAttributes) {
|
||||
SystemImpl.gl.disableVertexAttribArray(i);
|
||||
}
|
||||
useVertexAttributes = vertexBuffer.set(0);
|
||||
}
|
||||
|
||||
public function setVertexBuffers(vertexBuffers: Array<kha.graphics4.VertexBuffer>): Void {
|
||||
for (i in 0...useVertexAttributes) {
|
||||
SystemImpl.gl.disableVertexAttribArray(i);
|
||||
}
|
||||
var offset: Int = 0;
|
||||
for (vertexBuffer in vertexBuffers) {
|
||||
offset += vertexBuffer.set(offset);
|
||||
}
|
||||
useVertexAttributes = offset;
|
||||
}
|
||||
|
||||
public function createIndexBuffer(indexCount: Int, usage: Usage, canRead: Bool = false): kha.graphics4.IndexBuffer {
|
||||
return new IndexBuffer(indexCount, usage);
|
||||
}
|
||||
|
||||
public function setIndexBuffer(indexBuffer: kha.graphics4.IndexBuffer): Void {
|
||||
indicesCount = indexBuffer.count();
|
||||
indexBuffer.set();
|
||||
}
|
||||
|
||||
// public function maxTextureSize(): Int {
|
||||
// return Sys.gl == null ? 8192 : Sys.gl.getParameter(Sys.gl.MAX_TEXTURE_SIZE);
|
||||
// }
|
||||
// public function supportsNonPow2Textures(): Bool {
|
||||
// return false;
|
||||
// }
|
||||
|
||||
public function setTexture(stage: kha.graphics4.TextureUnit, texture: kha.Image): Void {
|
||||
if (texture == null) {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + (cast stage : TextureUnit).value);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
|
||||
}
|
||||
else {
|
||||
cast(texture, WebGLImage).set((cast stage : TextureUnit).value);
|
||||
}
|
||||
}
|
||||
|
||||
public function setTextureDepth(stage: kha.graphics4.TextureUnit, texture: kha.Image): Void {
|
||||
cast(texture, WebGLImage).setDepth((cast stage : TextureUnit).value);
|
||||
}
|
||||
|
||||
public function setTextureArray(unit: kha.graphics4.TextureUnit, texture: kha.Image): Void {
|
||||
// not implemented yet.
|
||||
}
|
||||
|
||||
public function setVideoTexture(unit: kha.graphics4.TextureUnit, texture: kha.Video): Void {
|
||||
if (texture == null) {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + (cast unit : TextureUnit).value);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_2D, null);
|
||||
}
|
||||
else {
|
||||
cast((cast texture : kha.js.Video).texture, WebGLImage).set((cast unit : TextureUnit).value);
|
||||
}
|
||||
}
|
||||
|
||||
public function setImageTexture(unit: kha.graphics4.TextureUnit, texture: kha.Image): Void {}
|
||||
|
||||
public function setTextureParameters(texunit: kha.graphics4.TextureUnit, uAddressing: TextureAddressing, vAddressing: TextureAddressing,
|
||||
minificationFilter: TextureFilter, magnificationFilter: TextureFilter, mipmapFilter: MipMapFilter): Void {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + (cast texunit : TextureUnit).value);
|
||||
|
||||
switch (uAddressing) {
|
||||
case Clamp:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
|
||||
case Repeat:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.REPEAT);
|
||||
case Mirror:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.MIRRORED_REPEAT);
|
||||
}
|
||||
|
||||
switch (vAddressing) {
|
||||
case Clamp:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
|
||||
case Repeat:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.REPEAT);
|
||||
case Mirror:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.MIRRORED_REPEAT);
|
||||
}
|
||||
|
||||
switch (minificationFilter) {
|
||||
case PointFilter:
|
||||
switch (mipmapFilter) {
|
||||
case NoMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
|
||||
case PointMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST_MIPMAP_NEAREST);
|
||||
case LinearMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST_MIPMAP_LINEAR);
|
||||
}
|
||||
case LinearFilter, AnisotropicFilter:
|
||||
switch (mipmapFilter) {
|
||||
case NoMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR);
|
||||
case PointMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR_MIPMAP_NEAREST);
|
||||
case LinearMipFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.LINEAR_MIPMAP_LINEAR);
|
||||
}
|
||||
if (minificationFilter == AnisotropicFilter) {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, SystemImpl.anisotropicFilter.TEXTURE_MAX_ANISOTROPY_EXT, 4);
|
||||
}
|
||||
}
|
||||
|
||||
switch (magnificationFilter) {
|
||||
case PointFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
|
||||
case LinearFilter, AnisotropicFilter:
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.LINEAR);
|
||||
}
|
||||
}
|
||||
|
||||
public function setTexture3DParameters(texunit: kha.graphics4.TextureUnit, uAddressing: TextureAddressing, vAddressing: TextureAddressing,
|
||||
wAddressing: TextureAddressing, minificationFilter: TextureFilter, magnificationFilter: TextureFilter, mipmapFilter: MipMapFilter): Void {}
|
||||
|
||||
public function setTextureCompareMode(texunit: kha.graphics4.TextureUnit, enabled: Bool) {
|
||||
if (enabled) {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL2.TEXTURE_COMPARE_MODE, GL2.COMPARE_REF_TO_TEXTURE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL2.TEXTURE_COMPARE_FUNC, GL.LEQUAL);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_2D, GL2.TEXTURE_COMPARE_MODE, GL.NONE);
|
||||
}
|
||||
}
|
||||
|
||||
public function setCubeMapCompareMode(texunit: kha.graphics4.TextureUnit, enabled: Bool) {
|
||||
if (enabled) {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL2.TEXTURE_COMPARE_MODE, GL2.COMPARE_REF_TO_TEXTURE);
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL2.TEXTURE_COMPARE_FUNC, GL.LEQUAL);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.texParameteri(GL.TEXTURE_CUBE_MAP, GL2.TEXTURE_COMPARE_MODE, GL.NONE);
|
||||
}
|
||||
}
|
||||
|
||||
public function setCubeMap(stage: kha.graphics4.TextureUnit, cubeMap: kha.graphics4.CubeMap): Void {
|
||||
if (cubeMap == null) {
|
||||
SystemImpl.gl.activeTexture(GL.TEXTURE0 + (cast stage : TextureUnit).value);
|
||||
SystemImpl.gl.bindTexture(GL.TEXTURE_CUBE_MAP, null);
|
||||
}
|
||||
else {
|
||||
cubeMap.set((cast stage : TextureUnit).value);
|
||||
}
|
||||
}
|
||||
|
||||
public function setCubeMapDepth(stage: kha.graphics4.TextureUnit, cubeMap: kha.graphics4.CubeMap): Void {
|
||||
cubeMap.setDepth((cast stage : TextureUnit).value);
|
||||
}
|
||||
|
||||
public function maxBoundTextures(): Int {
|
||||
return SystemImpl.gl.getParameter(GL.MAX_TEXTURE_IMAGE_UNITS);
|
||||
}
|
||||
|
||||
public function setCullMode(mode: CullMode): Void {
|
||||
switch (mode) {
|
||||
case None:
|
||||
SystemImpl.gl.disable(GL.CULL_FACE);
|
||||
case Clockwise:
|
||||
SystemImpl.gl.enable(GL.CULL_FACE);
|
||||
SystemImpl.gl.cullFace(GL.BACK);
|
||||
case CounterClockwise:
|
||||
SystemImpl.gl.enable(GL.CULL_FACE);
|
||||
SystemImpl.gl.cullFace(GL.FRONT);
|
||||
}
|
||||
}
|
||||
|
||||
public function setPipeline(pipe: PipelineState): Void {
|
||||
setCullMode(pipe.cullMode);
|
||||
setDepthMode(pipe.depthWrite, pipe.depthMode);
|
||||
if (pipe.stencilFrontMode == Always && pipe.stencilBackMode == Always && pipe.stencilFrontBothPass == Keep && pipe.stencilBackBothPass == Keep
|
||||
&& pipe.stencilFrontDepthFail == Keep && pipe.stencilBackDepthFail == Keep && pipe.stencilFrontFail == Keep && pipe.stencilBackFail == Keep) {
|
||||
SystemImpl.gl.disable(GL.STENCIL_TEST);
|
||||
}
|
||||
else {
|
||||
SystemImpl.gl.enable(GL.STENCIL_TEST);
|
||||
setStencilParameters(true, pipe.stencilFrontMode, pipe.stencilFrontBothPass, pipe.stencilFrontDepthFail, pipe.stencilFrontFail,
|
||||
pipe.stencilReferenceValue, pipe.stencilReadMask, pipe.stencilWriteMask);
|
||||
setStencilParameters(false, pipe.stencilBackMode, pipe.stencilBackBothPass, pipe.stencilBackDepthFail, pipe.stencilBackFail,
|
||||
pipe.stencilReferenceValue, pipe.stencilReadMask, pipe.stencilWriteMask);
|
||||
}
|
||||
setBlendingMode(pipe.blendSource, pipe.blendDestination, pipe.blendOperation, pipe.alphaBlendSource, pipe.alphaBlendDestination,
|
||||
pipe.alphaBlendOperation);
|
||||
currentPipeline = pipe;
|
||||
pipe.set();
|
||||
colorMaskRed = pipe.colorWriteMaskRed;
|
||||
colorMaskGreen = pipe.colorWriteMaskGreen;
|
||||
colorMaskBlue = pipe.colorWriteMaskBlue;
|
||||
colorMaskAlpha = pipe.colorWriteMaskAlpha;
|
||||
}
|
||||
|
||||
public function setStencilReferenceValue(value: Int): Void {
|
||||
SystemImpl.gl.stencilFuncSeparate(GL.FRONT, convertCompareMode(currentPipeline.stencilFrontMode), value, currentPipeline.stencilReadMask);
|
||||
SystemImpl.gl.stencilFuncSeparate(GL.BACK, convertCompareMode(currentPipeline.stencilBackMode), value, currentPipeline.stencilReadMask);
|
||||
}
|
||||
|
||||
public function setBool(location: kha.graphics4.ConstantLocation, value: Bool): Void {
|
||||
SystemImpl.gl.uniform1i((cast location : ConstantLocation).value, value ? 1 : 0);
|
||||
}
|
||||
|
||||
public function setInt(location: kha.graphics4.ConstantLocation, value: Int): Void {
|
||||
SystemImpl.gl.uniform1i((cast location : ConstantLocation).value, value);
|
||||
}
|
||||
|
||||
public function setInt2(location: kha.graphics4.ConstantLocation, value1: Int, value2: Int): Void {
|
||||
SystemImpl.gl.uniform2i((cast location : ConstantLocation).value, value1, value2);
|
||||
}
|
||||
|
||||
public function setInt3(location: kha.graphics4.ConstantLocation, value1: Int, value2: Int, value3: Int): Void {
|
||||
SystemImpl.gl.uniform3i((cast location : ConstantLocation).value, value1, value2, value3);
|
||||
}
|
||||
|
||||
public function setInt4(location: kha.graphics4.ConstantLocation, value1: Int, value2: Int, value3: Int, value4: Int): Void {
|
||||
SystemImpl.gl.uniform4i((cast location : ConstantLocation).value, value1, value2, value3, value4);
|
||||
}
|
||||
|
||||
public function setInts(location: kha.graphics4.ConstantLocation, values: Int32Array): Void {
|
||||
var webglLocation = (cast location : ConstantLocation);
|
||||
var rawValues = new js.lib.Int32Array(values.buffer, values.byteOffset, values.length);
|
||||
switch (webglLocation.type) {
|
||||
case GL.INT_VEC2:
|
||||
SystemImpl.gl.uniform2iv(webglLocation.value, rawValues);
|
||||
case GL.INT_VEC3:
|
||||
SystemImpl.gl.uniform3iv(webglLocation.value, rawValues);
|
||||
case GL.INT_VEC4:
|
||||
SystemImpl.gl.uniform4iv(webglLocation.value, rawValues);
|
||||
default:
|
||||
SystemImpl.gl.uniform1iv(webglLocation.value, rawValues);
|
||||
}
|
||||
}
|
||||
|
||||
public function setFloat(location: kha.graphics4.ConstantLocation, value: FastFloat): Void {
|
||||
SystemImpl.gl.uniform1f((cast location : ConstantLocation).value, value);
|
||||
}
|
||||
|
||||
public function setFloat2(location: kha.graphics4.ConstantLocation, value1: FastFloat, value2: FastFloat): Void {
|
||||
SystemImpl.gl.uniform2f((cast location : ConstantLocation).value, value1, value2);
|
||||
}
|
||||
|
||||
public function setFloat3(location: kha.graphics4.ConstantLocation, value1: FastFloat, value2: FastFloat, value3: FastFloat): Void {
|
||||
SystemImpl.gl.uniform3f((cast location : ConstantLocation).value, value1, value2, value3);
|
||||
}
|
||||
|
||||
public function setFloat4(location: kha.graphics4.ConstantLocation, value1: FastFloat, value2: FastFloat, value3: FastFloat, value4: FastFloat): Void {
|
||||
SystemImpl.gl.uniform4f((cast location : ConstantLocation).value, value1, value2, value3, value4);
|
||||
}
|
||||
|
||||
public function setFloats(location: kha.graphics4.ConstantLocation, values: Float32Array): Void {
|
||||
var webglLocation = (cast location : ConstantLocation);
|
||||
var rawValues = new js.lib.Float32Array(values.buffer, values.byteOffset, values.length);
|
||||
switch (webglLocation.type) {
|
||||
case GL.FLOAT_VEC2:
|
||||
SystemImpl.gl.uniform2fv(webglLocation.value, rawValues);
|
||||
case GL.FLOAT_VEC3:
|
||||
SystemImpl.gl.uniform3fv(webglLocation.value, rawValues);
|
||||
case GL.FLOAT_VEC4:
|
||||
SystemImpl.gl.uniform4fv(webglLocation.value, rawValues);
|
||||
case GL.FLOAT_MAT4:
|
||||
SystemImpl.gl.uniformMatrix4fv(webglLocation.value, false, rawValues);
|
||||
default:
|
||||
SystemImpl.gl.uniform1fv(webglLocation.value, rawValues);
|
||||
}
|
||||
}
|
||||
|
||||
public function setVector2(location: kha.graphics4.ConstantLocation, value: FastVector2): Void {
|
||||
SystemImpl.gl.uniform2f((cast location : ConstantLocation).value, value.x, value.y);
|
||||
}
|
||||
|
||||
public function setVector3(location: kha.graphics4.ConstantLocation, value: FastVector3): Void {
|
||||
SystemImpl.gl.uniform3f((cast location : ConstantLocation).value, value.x, value.y, value.z);
|
||||
}
|
||||
|
||||
public function setVector4(location: kha.graphics4.ConstantLocation, value: FastVector4): Void {
|
||||
SystemImpl.gl.uniform4f((cast location : ConstantLocation).value, value.x, value.y, value.z, value.w);
|
||||
}
|
||||
|
||||
static var matrixCache = new js.lib.Float32Array(16);
|
||||
public inline function setMatrix(location: kha.graphics4.ConstantLocation, matrix: FastMatrix4): Void {
|
||||
matrixCache[0] = matrix._00;
|
||||
matrixCache[1] = matrix._01;
|
||||
matrixCache[2] = matrix._02;
|
||||
matrixCache[3] = matrix._03;
|
||||
matrixCache[4] = matrix._10;
|
||||
matrixCache[5] = matrix._11;
|
||||
matrixCache[6] = matrix._12;
|
||||
matrixCache[7] = matrix._13;
|
||||
matrixCache[8] = matrix._20;
|
||||
matrixCache[9] = matrix._21;
|
||||
matrixCache[10] = matrix._22;
|
||||
matrixCache[11] = matrix._23;
|
||||
matrixCache[12] = matrix._30;
|
||||
matrixCache[13] = matrix._31;
|
||||
matrixCache[14] = matrix._32;
|
||||
matrixCache[15] = matrix._33;
|
||||
SystemImpl.gl.uniformMatrix4fv((cast location : ConstantLocation).value, false, matrixCache);
|
||||
}
|
||||
|
||||
static var matrix3Cache = new js.lib.Float32Array(9);
|
||||
public inline function setMatrix3(location: kha.graphics4.ConstantLocation, matrix: FastMatrix3): Void {
|
||||
matrix3Cache[0] = matrix._00;
|
||||
matrix3Cache[1] = matrix._01;
|
||||
matrix3Cache[2] = matrix._02;
|
||||
matrix3Cache[3] = matrix._10;
|
||||
matrix3Cache[4] = matrix._11;
|
||||
matrix3Cache[5] = matrix._12;
|
||||
matrix3Cache[6] = matrix._20;
|
||||
matrix3Cache[7] = matrix._21;
|
||||
matrix3Cache[8] = matrix._22;
|
||||
SystemImpl.gl.uniformMatrix3fv((cast location : ConstantLocation).value, false, matrix3Cache);
|
||||
}
|
||||
|
||||
public function drawIndexedVertices(start: Int = 0, count: Int = -1): Void {
|
||||
var type = SystemImpl.elementIndexUint == null ? GL.UNSIGNED_SHORT : GL.UNSIGNED_INT;
|
||||
var size = type == GL.UNSIGNED_SHORT ? 2 : 4;
|
||||
SystemImpl.gl.drawElements(GL.TRIANGLES, count == -1 ? indicesCount : count, type, start * size);
|
||||
}
|
||||
|
||||
function convertStencilAction(action: StencilAction) {
|
||||
switch (action) {
|
||||
case StencilAction.Decrement:
|
||||
return GL.DECR;
|
||||
case StencilAction.DecrementWrap:
|
||||
return GL.DECR_WRAP;
|
||||
case StencilAction.Increment:
|
||||
return GL.INCR;
|
||||
case StencilAction.IncrementWrap:
|
||||
return GL.INCR_WRAP;
|
||||
case StencilAction.Invert:
|
||||
return GL.INVERT;
|
||||
case StencilAction.Keep:
|
||||
return GL.KEEP;
|
||||
case StencilAction.Replace:
|
||||
return GL.REPLACE;
|
||||
case StencilAction.Zero:
|
||||
return GL.ZERO;
|
||||
}
|
||||
}
|
||||
|
||||
function convertCompareMode(compareMode: CompareMode) {
|
||||
switch (compareMode) {
|
||||
case Always:
|
||||
return GL.ALWAYS;
|
||||
case Equal:
|
||||
return GL.EQUAL;
|
||||
case Greater:
|
||||
return GL.GREATER;
|
||||
case GreaterEqual:
|
||||
return GL.GEQUAL;
|
||||
case Less:
|
||||
return GL.LESS;
|
||||
case LessEqual:
|
||||
return GL.LEQUAL;
|
||||
case Never:
|
||||
return GL.NEVER;
|
||||
case NotEqual:
|
||||
return GL.NOTEQUAL;
|
||||
}
|
||||
}
|
||||
|
||||
public function setStencilParameters(front: Bool, compareMode: CompareMode, bothPass: StencilAction, depthFail: StencilAction, stencilFail: StencilAction,
|
||||
referenceValue: StencilValue, readMask: Int = 0xff, writeMask: Int = 0xff): Void {
|
||||
var stencilFunc = convertCompareMode(compareMode);
|
||||
SystemImpl.gl.stencilMaskSeparate(front ? GL.FRONT : GL.BACK, writeMask);
|
||||
SystemImpl.gl.stencilOpSeparate(front ? GL.FRONT : GL.BACK, convertStencilAction(stencilFail), convertStencilAction(depthFail),
|
||||
convertStencilAction(bothPass));
|
||||
switch (referenceValue) {
|
||||
case Static(value):
|
||||
SystemImpl.gl.stencilFuncSeparate(front ? GL.FRONT : GL.BACK, stencilFunc, value, readMask);
|
||||
case Dynamic:
|
||||
SystemImpl.gl.stencilFuncSeparate(front ? GL.FRONT : GL.BACK, stencilFunc, 0, readMask);
|
||||
}
|
||||
}
|
||||
|
||||
public function drawIndexedVerticesInstanced(instanceCount: Int, start: Int = 0, count: Int = -1) {
|
||||
if (instancedRenderingAvailable()) {
|
||||
var type = SystemImpl.elementIndexUint == null ? GL.UNSIGNED_SHORT : GL.UNSIGNED_INT;
|
||||
var typeSize = SystemImpl.elementIndexUint == null ? 2 : 4;
|
||||
if (SystemImpl.gl2) {
|
||||
untyped SystemImpl.gl.drawElementsInstanced(GL.TRIANGLES, count == -1 ? indicesCount : count, type, start * typeSize, instanceCount);
|
||||
}
|
||||
else {
|
||||
instancedExtension.drawElementsInstancedANGLE(GL.TRIANGLES, count == -1 ? indicesCount : count, type, start * typeSize, instanceCount);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public function instancedRenderingAvailable(): Bool {
|
||||
return instancedExtension;
|
||||
}
|
||||
}
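For orientation, a minimal graphics4 render pass as driven through Kha's public Framebuffer API (illustrative only; pipeline and buffer setup is elided):

// Illustrative sketch: begin() binds the target and enforces the one-active-pass
// guard above, clear() maps to the WebGL clear, end() runs the debug GL error check.
class RenderPassExample {
	public static function render(framebuffer: kha.Framebuffer): Void {
		var g = framebuffer.g4;
		g.begin();
		g.clear(kha.Color.Black, 1.0); // color + depth; stencil left untouched
		// g.setPipeline(...), g.setVertexBuffer(...), g.setIndexBuffer(...), g.drawIndexedVertices()
		g.end();
	}
}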
|
25
Kha/Backends/HTML5/kha/js/graphics4/Graphics2.hx
Normal file
25
Kha/Backends/HTML5/kha/js/graphics4/Graphics2.hx
Normal file
@ -0,0 +1,25 @@
package kha.js.graphics4;

import js.html.webgl.GL;
import kha.Color;

class Graphics2 extends kha.graphics4.Graphics2 {
	public function new(canvas: Canvas) {
		super(canvas);
	}

	override public function drawVideoInternal(video: kha.Video, x: Float, y: Float, width: Float, height: Float): Void {
		var v = cast(video, Video);
		drawScaledSubImage(v.texture, 0, 0, v.texture.width, v.texture.height, x, y, width, height);
	}

	override public function begin(clear: Bool = true, clearColor: Color = null): Void {
		SystemImpl.gl.colorMask(true, true, true, true);

		// Disable depth test so that everything is just overpainted as determined by the order of function calls
		SystemImpl.gl.disable(GL.DEPTH_TEST);
		SystemImpl.gl.depthFunc(GL.ALWAYS);

		super.begin(clear, clearColor);
	}
}
9
Kha/Backends/HTML5/kha/js/graphics4/TextureUnit.hx
Normal file
@ -0,0 +1,9 @@
package kha.js.graphics4;

class TextureUnit implements kha.graphics4.TextureUnit {
	public var value: Int;

	public function new(value: Int) {
		this.value = value;
	}
}
249
Kha/Backends/HTML5/kha/js/vr/VrInterface.hx
Executable file
@ -0,0 +1,249 @@
package kha.js.vr;

import js.Syntax;
import js.lib.Float32Array;
import kha.vr.Pose;
import kha.vr.PoseState;
import kha.vr.SensorState;
import kha.vr.TimeWarpParms;
import kha.math.FastMatrix4;
import kha.math.Vector3;
import kha.math.Quaternion;
import kha.SystemImpl;

class VrInterface extends kha.vr.VrInterface {
	var vrEnabled: Bool = false;

	var vrDisplay: Dynamic;
	var frameData: Dynamic;

	var leftProjectionMatrix: FastMatrix4 = FastMatrix4.identity();
	var rightProjectionMatrix: FastMatrix4 = FastMatrix4.identity();
	var leftViewMatrix: FastMatrix4 = FastMatrix4.identity();
	var rightViewMatrix: FastMatrix4 = FastMatrix4.identity();

	var width: Int = 0;
	var height: Int = 0;
	var vrWidth: Int = 0;
	var vrHeight: Int = 0;

	public function new() {
		super();
		#if kha_webvr
		var displayEnabled: Bool = Syntax.code("navigator.getVRDisplays");
		#else
		var displayEnabled = false;
		#end
		if (displayEnabled) {
			vrEnabled = true;
			getVRDisplays();
			trace("Display enabled.");
		}
	}

	function getVRDisplays() {
		var vrDisplayInstance = Syntax.code("navigator.getVRDisplays()");
		vrDisplayInstance.then(function(displays) {
			if (displays.length > 0) {
				frameData = Syntax.code("new VRFrameData()");
				vrDisplay = Syntax.code("displays[0]");
				vrDisplay.depthNear = 0.1;
				vrDisplay.depthFar = 1024.0;

				var leftEye = vrDisplay.getEyeParameters("left");
				var rightEye = vrDisplay.getEyeParameters("right");
				width = SystemImpl.khanvas.width;
				height = SystemImpl.khanvas.height;
				vrWidth = Std.int(Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2);
				vrHeight = Std.int(Math.max(leftEye.renderHeight, rightEye.renderHeight));
			}
			else {
				trace("There are no VR displays connected.");
			}
		});
	}

	public override function onVRRequestPresent() {
		try {
			vrDisplay.requestPresent([{source: SystemImpl.khanvas}]).then(function() {
				onResize();
				vrDisplay.requestAnimationFrame(onAnimationFrame);
			});
		}
		catch (err:Dynamic) {
			trace("Failed to requestPresent.");
			trace(err);
		}
	}

	public override function onVRExitPresent() {
		try {
			vrDisplay.exitPresent([{source: SystemImpl.khanvas}]).then(function() {
				onResize();
			});
		}
		catch (err:Dynamic) {
			trace("Failed to exitPresent.");
			trace(err);
		}
	}

	public override function onResetPose() {
		try {
			vrDisplay.resetPose();
		}
		catch (err:Dynamic) {
			trace("Failed to resetPose");
			trace(err);
		}
	}

	function onAnimationFrame(timestamp: Float): Void {
		if (vrDisplay != null && vrDisplay.isPresenting) {
			vrDisplay.requestAnimationFrame(onAnimationFrame);

			vrDisplay.getFrameData(frameData);

			leftProjectionMatrix = createMatrixFromArray(untyped frameData.leftProjectionMatrix);
			leftViewMatrix = createMatrixFromArray(untyped frameData.leftViewMatrix);

			rightProjectionMatrix = createMatrixFromArray(untyped frameData.rightProjectionMatrix);
			rightViewMatrix = createMatrixFromArray(untyped frameData.rightViewMatrix);

			// Submit the newly rendered layer to be presented by the VRDisplay
			vrDisplay.submitFrame();
		}
	}

	function onResize() {
		if (vrDisplay != null && vrDisplay.isPresenting) {
			SystemImpl.khanvas.width = vrWidth;
			SystemImpl.khanvas.height = vrHeight;
		}
		else {
			SystemImpl.khanvas.width = width;
			SystemImpl.khanvas.height = height;
		}
	}

	public override function GetSensorState(): SensorState {
		return GetPredictedSensorState(0.0);
	}

	public override function GetPredictedSensorState(time: Float): SensorState {
		var result: SensorState = new SensorState();

		result.Predicted = new PoseState();
		result.Recorded = result.Predicted;

		result.Predicted.AngularAcceleration = new Vector3();
		result.Predicted.AngularVelocity = new Vector3();
		result.Predicted.LinearAcceleration = new Vector3();
		result.Predicted.LinearVelocity = new Vector3();
		result.Predicted.TimeInSeconds = time;
		result.Predicted.Pose = new Pose();
		result.Predicted.Pose.Orientation = new Quaternion();
		result.Predicted.Pose.Position = new Vector3();

		var mPose = frameData.pose; // predicted pose of the vrDisplay
		if (mPose != null) {
			result.Predicted.AngularVelocity = createVectorFromArray(untyped mPose.angularVelocity);
			result.Predicted.AngularAcceleration = createVectorFromArray(untyped mPose.angularAcceleration);
			result.Predicted.LinearVelocity = createVectorFromArray(untyped mPose.linearVelocity);
			result.Predicted.LinearAcceleration = createVectorFromArray(untyped mPose.linearAcceleration);
			result.Predicted.Pose.Orientation = createQuaternion(untyped mPose.orientation);
			result.Predicted.Pose.Position = createVectorFromArray(untyped mPose.position);
		}

		return result;
	}

	// Sends a black image to the warp swap thread
	public override function WarpSwapBlack(): Void {
		// TODO: Implement
	}

	// Sends the Oculus loading symbol to the warp swap thread
	public override function WarpSwapLoadingIcon(): Void {
		// TODO: Implement
	}

	// Sends the set of images to the warp swap thread
	public override function WarpSwap(parms: TimeWarpParms): Void {
		// TODO: Implement
	}

	public override function IsPresenting(): Bool {
		if (vrDisplay != null)
			return vrDisplay.isPresenting;
		return false;
	}

	public override function IsVrEnabled(): Bool {
		return vrEnabled;
	}

	public override function GetTimeInSeconds(): Float {
		return Scheduler.time();
	}

	public override function GetProjectionMatrix(eye: Int): FastMatrix4 {
		if (eye == 0) {
			return leftProjectionMatrix;
		}
		else {
			return rightProjectionMatrix;
		}
	}

	public override function GetViewMatrix(eye: Int): FastMatrix4 {
		if (eye == 0) {
			return leftViewMatrix;
		}
		else {
			return rightViewMatrix;
		}
	}

	function createMatrixFromArray(array: Float32Array): FastMatrix4 {
		var matrix: FastMatrix4 = FastMatrix4.identity();
		matrix._00 = array[0];
		matrix._01 = array[1];
		matrix._02 = array[2];
		matrix._03 = array[3];
		matrix._10 = array[4];
		matrix._11 = array[5];
		matrix._12 = array[6];
		matrix._13 = array[7];
		matrix._20 = array[8];
		matrix._21 = array[9];
		matrix._22 = array[10];
		matrix._23 = array[11];
		matrix._30 = array[12];
		matrix._31 = array[13];
		matrix._32 = array[14];
		matrix._33 = array[15];
		return matrix;
	}

	function createVectorFromArray(array: Float32Array): Vector3 {
		var vector: Vector3 = new Vector3(0, 0, 0);
		if (array != null) {
			vector.x = array[0];
			vector.y = array[1];
			vector.z = array[2];
		}
		return vector;
	}

	function createQuaternion(array: Float32Array): Quaternion {
		var quaternion: Quaternion = new Quaternion(0, 0, 0, 0);
		if (array != null) {
			quaternion.x = array[0];
			quaternion.y = array[1];
			quaternion.z = array[2];
			quaternion.w = array[3];
		}
		return quaternion;
	}
}
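A hedged sketch of how a caller might drive this WebVR wrapper; the `vr` variable, the user-gesture handler, and the per-eye render step are illustrative assumptions, not part of this diff:

	// Illustrative only: WebVR requires presentation to be requested from a user gesture.
	var vr = new kha.js.vr.VrInterface();
	if (vr.IsVrEnabled()) {
		// e.g. inside a click or keydown handler:
		vr.onVRRequestPresent();
	}
	// Later, inside the render callback, fetch per-eye matrices (0 = left, 1 = right):
	var leftProjection = vr.GetProjectionMatrix(0);
	var leftView = vr.GetViewMatrix(0);
	var rightProjection = vr.GetProjectionMatrix(1);
	var rightView = vr.GetViewMatrix(1);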
68
Kha/Backends/HTML5/kha/netsync/Network.hx
Normal file
@ -0,0 +1,68 @@
package kha.netsync;

import haxe.io.Bytes;
import js.html.BinaryType;
import js.html.WebSocket;

class Network {
	var socket: WebSocket;
	var open: Bool = false;

	public function new(url: String, port: Int, errorCallback: Void->Void, closeCallback: Void->Void) {
		socket = new WebSocket("ws://" + url + ":" + port);
		socket.onerror = function(error) {
			trace("Network error.");
			errorCallback();
		}
		socket.binaryType = BinaryType.ARRAYBUFFER;
		socket.onopen = function() {
			open = true;
		};
		socket.onclose = function(event) {
			trace("Network connection closed. " + webSocketCloseReason(event.code) + " (" + event.reason + ").");
			closeCallback();
		}
	}

	static function webSocketCloseReason(code: Int): String {
		switch (code) {
			case 1000:
				return "Normal Closure";
			case 1001:
				return "Going Away";
			case 1002:
				return "Protocol error";
			case 1003:
				return "Unsupported Data";
			case 1005:
				return "No Status Rcvd";
			case 1006:
				return "Abnormal Closure";
			case 1007:
				return "Invalid frame";
			case 1008:
				return "Policy Violation";
			case 1009:
				return "Message Too Big";
			case 1010:
				return "Mandatory Ext.";
			case 1011:
				return "Internal Server Error";
			case 1015:
				return "TLS handshake";
			default:
				return "";
		}
	}

	public function send(bytes: Bytes, mandatory: Bool): Void {
		if (open)
			socket.send(bytes.getData());
	}

	public function listen(listener: Bytes->Void): Void {
		socket.onmessage = function(message) {
			listener(Bytes.ofData(message.data));
		};
	}
}
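A minimal usage sketch for this WebSocket wrapper; the host, port, and callbacks are illustrative assumptions:

	// Illustrative only: connect, listen for incoming packets, and send one once the socket is open.
	var net = new kha.netsync.Network("localhost", 8080,
		function() trace("connection failed"),
		function() trace("connection closed"));
	net.listen(function(bytes: haxe.io.Bytes) {
		trace("received " + bytes.length + " bytes");
	});
	// send() silently drops the packet until onopen has fired and `open` is true.
	net.send(haxe.io.Bytes.ofString("ping"), true);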
40
Kha/Backends/HTML5/kha/network/Http.hx
Normal file
@ -0,0 +1,40 @@
package kha.network;

import js.html.XMLHttpRequest;

class Http {
	static function methodToString(method: HttpMethod): String {
		switch (method) {
			case Get:
				return "GET";
			case Post:
				return "POST";
			case Put:
				return "PUT";
			case Delete:
				return "DELETE";
		}
	}

	public static function request(url: String, path: String, data: String, port: Int, secure: Bool, method: HttpMethod, headers: Map<String, String>,
			callback: Int->Int->String->Void /*error, response, body*/): Void {
		var req = new XMLHttpRequest("");
		var completeUrl = (secure ? "https://" : "http://") + url + ":" + port + "/" + path;
		req.open(methodToString(method), completeUrl, true);
		if (headers != null) {
			for (key in headers.keys()) {
				req.setRequestHeader(key, headers[key]);
			}
		}
		req.onreadystatechange = function() {
			if (req.readyState != 4)
				return;
			if (req.status != 200) {
				callback(1, req.status, null);
				return;
			}
			callback(0, req.status, req.responseText);
		}
		req.send(data);
	}
}
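A hedged example of calling this helper; the host, path, and handler are illustrative assumptions:

	// Illustrative only: GET http://example.com:80/status and print the body on success.
	// The callback receives (error, response, body); error is 0 on HTTP 200 and 1 otherwise.
	kha.network.Http.request("example.com", "status", null, 80, false, Get, null,
		function(error: Int, response: Int, body: String) {
			if (error == 0)
				trace(body);
			else
				trace("request failed with status " + response);
		});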