Update Files

commit a36294b518 (parent ed4603cf95)
2025-01-22 16:18:30 +01:00
16718 changed files with 2960346 additions and 0 deletions


@@ -0,0 +1,18 @@
#pragma once
#include <android_native_app_glue.h>
#ifdef __cplusplus
extern "C" {
#endif
// class name in the usual Java syntax (dots, not slashes)
jclass kinc_android_find_class(JNIEnv *env, const char *name);
ANativeActivity *kinc_android_get_activity(void);
AAssetManager *kinc_android_get_asset_manager(void);
#ifdef __cplusplus
}
#endif
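For illustration, a minimal sketch of how these helpers are typically combined (the class name and the helper below are assumptions, not part of this header):

#include <jni.h>
#include <kinc/backend/Android.h>

// Hypothetical helper: attach to the VM, resolve a class by its dotted
// Java name, and pin it with a global reference so it survives detach.
static jclass find_kinc_activity_class(void) {
	JNIEnv *env;
	JavaVM *vm = kinc_android_get_activity()->vm;
	(*vm)->AttachCurrentThread(vm, &env, NULL);
	jclass local = kinc_android_find_class(env, "tech.kinc.KincActivity");
	jclass global = (jclass)(*env)->NewGlobalRef(env, local);
	(*vm)->DetachCurrentThread(vm);
	return global;
}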


@@ -0,0 +1,385 @@
#if 0
#include "VrInterface.h"
#ifdef VR_GEAR_VR
#include <kha/Image.h>
#include <kha/math/Matrix4.h>
#include <kha/math/Quaternion.h>
#include <kha/math/Vector3.h>
#include <kha/vr/Pose.h>
#include <kha/vr/PoseState.h>
#include <kha/vr/TimeWarpImage.h>
#include <GlTexture.h>
#include <VrApi/VrApi.h>
#include <VrApi/VrApi_Helpers.h>
#include <LibOvr/Src/Kernel/OVR_Math.h>
#include <Kore/log.h>
#endif
namespace Kore {
//
namespace VrInterface {
// Set during Initialize()
#ifdef VR_GEAR_VR
static ovrMobile* ovr;
#endif
static JavaVM* cachedJVM;
static jobject instance;
static jclass koreActivity;
static float qx;
static float qy;
static float qz;
static float qw;
//
void SetJVM(JavaVM* jvm) {
cachedJVM = jvm;
// Grab the activity object
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
koreActivity = env->FindClass("tech/kode/kore/KoreActivity");
koreActivity = (jclass) env->NewGlobalRef(koreActivity);
jmethodID mid = env->GetStaticMethodID(koreActivity, "getInstance", "()Ltech/kode/kore/KoreActivity;");
instance = env->CallStaticObjectMethod(koreActivity, mid);
// Make sure that the garbage collector does not clean this up for us
instance = env->NewGlobalRef(instance);
}
#ifdef VR_CARDBOARD
void DistortionBefore() {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortionBeforeFrame", "()V");
env->CallVoidMethod(instance, mid);
}
void DistortionAfter() {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortionAfterFrame", "()V");
env->CallVoidMethod(instance, mid);
}
void DistortTexture(kha::Image_obj* image) {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortTexture", "(I)V");
env->CallVoidMethod(instance, mid, image->renderTarget->_texture);
}
void updateGaze(float x, float y, float z, float w) {
qx = x;
qy = y;
qz = z;
qw = w;
}
template<typename T> T* CreateEmpty() {
return dynamic_cast<T*>(T::__CreateEmpty().mPtr);
}
kha::math::Quaternion_obj* getGaze() {
kha::math::Quaternion_obj* result = CreateEmpty<kha::math::Quaternion_obj>();
result->__construct(qx, qy, qz, qw);
return result;
}
#endif
#ifdef VR_GEAR_VR
void Initialize() {
ovrModeParms parms;
parms.AsynchronousTimeWarp = true;
parms.AllowPowerSave = true;
parms.DistortionFileName = 0;
parms.EnableImageServer = false;
parms.SkipWindowFullscreenReset = true;
// Grab the activity object
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jclass koreActivity = env->FindClass("tech/kode/kore/KoreActivity");
jmethodID mid = env->GetStaticMethodID(koreActivity, "getInstance", "()Ltech/kode/kore/KoreActivity;");
jobject instance = env->CallStaticObjectMethod(koreActivity, mid);
// Make sure that the garbage collector does not clean this up for us
instance = env->NewGlobalRef(instance);
parms.ActivityObject = instance;
parms.GameThreadTid = 0;
parms.CpuLevel = 2;
parms.GpuLevel = 2;
ovrHmdInfo returnedHmdInfo;
ovr = ovr_EnterVrMode(parms, &returnedHmdInfo);
}
void WarpSwapBlack() {
// TODO: Not in the API anymore :-(
//ovr_WarpSwapBlack(ovr);
}
void WarpSwapLoadingIcon() {
//ovr_WarpSwapLoadingIcon(ovr);
}
template<typename T> T* CreateEmpty() {
return dynamic_cast<T*>(T::__CreateEmpty().mPtr);
}
kha::math::Quaternion_obj* GetQuaternion(const ovrQuatf& q) {
kha::math::Quaternion_obj* quaternion = CreateEmpty<kha::math::Quaternion_obj>();
quaternion->__construct(0.0f, 0.0f, 0.0f, 0.0f);
quaternion->set_x(q.x);
quaternion->set_y(q.y);
quaternion->set_z(q.z);
quaternion->set_w(q.w);
return quaternion;
}
ovrQuatf GetQuaternion(kha::math::Quaternion_obj* quat) {
ovrQuatf result;
result.x = quat->get_x();
result.y = quat->get_y();
result.z = quat->get_z();
result.w = quat->get_w();
return result;
}
ovrMatrix4f GetMatrix(kha::math::Matrix4_obj* mat) {
ovrMatrix4f result;
for (int x = 0; x < 4; x++) {
for (int y = 0; y < 4; y++) {
float f = mat->get(x, y);
result.M[x][y] = f;
}
}
return result;
}
kha::math::Vector3_obj* GetVector3(const ovrVector3f& v) {
kha::math::Vector3_obj* vector = CreateEmpty<kha::math::Vector3_obj>();
vector->x = v.x;
vector->y = v.y;
vector->z = v.z;
return vector;
}
ovrVector3f GetVector3(kha::math::Vector3_obj* v) {
ovrVector3f result;
result.x = v->x;
result.y = v->y;
result.z = v->z;
return result;
}
kha::vr::Pose_obj* GetPose(const ovrPosef& nativePose) {
kha::vr::Pose_obj* pose = CreateEmpty<kha::vr::Pose_obj>();
pose->Position = GetVector3(nativePose.Position);
pose->Orientation = GetQuaternion(nativePose.Orientation);
return pose;
}
kha::vr::PoseState_obj* GetPoseState(const ovrPoseStatef& nativeState) {
kha::vr::PoseState_obj* poseState = CreateEmpty<kha::vr::PoseState_obj>();
poseState->TimeInSeconds = nativeState.TimeInSeconds;
poseState->AngularAcceleration = GetVector3(nativeState.AngularAcceleration);
poseState->AngularVelocity = GetVector3(nativeState.AngularVelocity);
poseState->LinearAcceleration = GetVector3(nativeState.LinearAcceleration);
poseState->LinearVelocity = GetVector3(nativeState.LinearVelocity);
poseState->Pose = GetPose(nativeState.Pose);
return poseState;
}
kha::vr::SensorState_obj* GetPredictedSensorState(const float time) {
kha::vr::SensorState_obj* state = dynamic_cast<kha::vr::SensorState_obj*>(kha::vr::SensorState_obj::__CreateEmpty().mPtr);
ovrSensorState nativeState = ovr_GetPredictedSensorState(ovr, time);
state->Temperature = nativeState.Temperature;
state->Status = nativeState.Status;
state->Predicted = GetPoseState(nativeState.Predicted);
state->Recorded = GetPoseState(nativeState.Recorded);
return state;
}
kha::vr::SensorState_obj* GetSensorState() {
// 0.0 gets the last reading
return GetPredictedSensorState(0.0f);
}
ovrPosef GetPose(kha::vr::Pose_obj* pose) {
ovrPosef result;
result.Orientation = GetQuaternion(pose->Orientation.mPtr);
result.Position = GetVector3(pose->Position.mPtr);
return result;
}
ovrPoseStatef GetPoseState(kha::vr::PoseState_obj* poseState) {
ovrPoseStatef result;
result.TimeInSeconds = poseState->TimeInSeconds;
result.AngularAcceleration = GetVector3(poseState->AngularAcceleration.mPtr);
result.AngularVelocity = GetVector3(poseState->AngularVelocity.mPtr);
result.LinearAcceleration = GetVector3(poseState->LinearAcceleration.mPtr);
result.LinearVelocity = GetVector3(poseState->LinearVelocity.mPtr);
result.Pose = GetPose(poseState->Pose.mPtr);
return result;
}
ovrTimeWarpImage GetTimeWarpImage(kha::vr::TimeWarpImage_obj* image) {
ovrTimeWarpImage result;
if (image == 0) {
result.TexId = 0;
return result;
}
if (image->Image->renderTarget != 0) {
result.TexId = image->Image->renderTarget->_texture;
} else {
result.TexId = image->Image->texture->texture;
}
result.Pose = GetPoseState(image->Pose.mPtr);
result.TexCoordsFromTanAngles = GetMatrix(image->TexCoordsFromTanAngles.mPtr);
// result.TexCoordsFromTanAngles = TanAngleMatrixFromProjection(&result.TexCoordsFromTanAngles);
result.TexCoordsFromTanAngles = TanAngleMatrixFromFov(90.0f);
return result;
}
bool AreDifferent(ovrMatrix4f& lhs, ovrMatrix4f& rhs) {
for (int x = 0; x < 4; x++) {
for (int y = 0; y < 4; y++) {
if (Kore::abs(lhs.M[x][y] - rhs.M[x][y]) > 0.1f) return true;
}
}
return false;
}
void WarpSwap(kha::vr::TimeWarpParms_obj* parms) {
ovrTimeWarpParms nativeParms = InitTimeWarpParms();
const double predictedTime = ovr_GetPredictedDisplayTime( ovr, 1, 1 );
const ovrSensorState state = ovr_GetPredictedSensorState( ovr, predictedTime );
ovrTimeWarpImage leftImage = GetTimeWarpImage(parms->LeftImage.mPtr);
ovrTimeWarpImage rightImage = GetTimeWarpImage(parms->RightImage.mPtr);
ovrTimeWarpImage leftOverlay = GetTimeWarpImage(parms->LeftOverlay.mPtr);
ovrTimeWarpImage rightOverlay = GetTimeWarpImage(parms->RightOverlay.mPtr);
leftImage.Pose = state.Predicted;
leftOverlay.TexId = 0;
rightOverlay.TexId = 0;
//nativeParms->WarpProgram = WP_SIMPLE;
nativeParms.Images[0][0] = leftImage;
nativeParms.Images[0][1] = leftOverlay;
nativeParms.Images[1][0] = rightImage;
nativeParms.Images[1][1] = rightOverlay;
// nativeParms->WarpProgram = WP_OVERLAY_PLANE;
/*ovrMatrix4f comparison = OVR::Matrix4f::Translation(1.0f, 2.0f, 3.0f);
if (AreDifferent(comparison, nativeParms->Images[0][0].TexCoordsFromTanAngles)) {
Kore::log(Kore::Info, "Matrices are different!");
} else {
Kore::log(Kore::Info, "Matrices are identical");
} */
//ovrTimeWarpParms testParms = InitTimeWarpParms( WARP_INIT_LOADING_ICON);
ovr_WarpSwap(ovr, &nativeParms);
// TODO: What about memory - who deletes What?
}
double GetTimeInSeconds() {
return ovr_GetTimeInSeconds();
}
#endif
}
//
}
#endif


@@ -0,0 +1,55 @@
#pragma once
#ifdef ANDROID
#include <jni.h>
#endif
#include <kha/vr/SensorState.h>
#include <kha/vr/TimeWarpParms.h>
#include <kha/Image.h>
#include <kha/math/Quaternion.h>
namespace Kore {
namespace VrInterface {
#ifdef ANDROID
// Save the JVM. Must be called before Initialize().
// TODO: Can this be handled better?
void SetJVM(JavaVM *jvm);
#endif
#ifdef VR_CARDBOARD
void DistortionBefore();
void DistortionAfter();
void DistortTexture(kha::Image_obj *image);
void updateGaze(float x, float y, float z, float w);
kha::math::Quaternion_obj *getGaze();
#endif
#ifdef VR_GEAR_VR
// Calls ovr_EnterVrMode
void Initialize();
void WarpSwapBlack();
void WarpSwapLoadingIcon();
kha::vr::SensorState_obj *GetSensorState();
kha::vr::SensorState_obj *GetPredictedSensorState(float time);
double GetTimeInSeconds();
void WarpSwap(kha::vr::TimeWarpParms_obj *parms);
#endif
}
}


@@ -0,0 +1,5 @@
#include "audio.c.h"
#include "display.c.h"
#include "system.c.h"
#include "window.c.h"
#include "video.c.h"


@@ -0,0 +1,133 @@
#include <kinc/audio2/audio.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <stdlib.h>
#include <string.h>
static kinc_a2_buffer_t a2_buffer;
static SLObjectItf engineObject;
static SLEngineItf engineEngine;
static SLObjectItf outputMixObject;
static SLObjectItf bqPlayerObject;
static SLPlayItf bqPlayerPlay = NULL;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
#define AUDIO_BUFFER_SIZE 1 * 1024
static int16_t tempBuffer[AUDIO_BUFFER_SIZE];
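// AUDIO_BUFFER_SIZE counts int16 samples (512 interleaved stereo frames);
// the Enqueue calls below therefore pass AUDIO_BUFFER_SIZE * 2 bytes.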
static void copySample(void *buffer) {
float left_value = a2_buffer.channels[0][a2_buffer.read_location];
float right_value = a2_buffer.channels[1][a2_buffer.read_location];
a2_buffer.read_location += 1;
if (a2_buffer.read_location >= a2_buffer.data_size) {
a2_buffer.read_location = 0;
}
((int16_t *)buffer)[0] = (int16_t)(left_value * 32767);
((int16_t *)buffer)[1] = (int16_t)(right_value * 32767);
}
static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf caller, void *context) {
if (kinc_a2_internal_callback(&a2_buffer, AUDIO_BUFFER_SIZE / 2)) {
for (int i = 0; i < AUDIO_BUFFER_SIZE; i += 2) {
copySample(&tempBuffer[i]);
}
}
else {
memset(tempBuffer, 0, sizeof(tempBuffer));
}
SLresult result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, tempBuffer, AUDIO_BUFFER_SIZE * 2);
}
static bool initialized = false;
void kinc_a2_init() {
if (initialized) {
return;
}
kinc_a2_internal_init();
initialized = true;
a2_buffer.read_location = 0;
a2_buffer.write_location = 0;
a2_buffer.data_size = 128 * 1024;
a2_buffer.channel_count = 2;
a2_buffer.channels[0] = (float*)malloc(a2_buffer.data_size * sizeof(float));
a2_buffer.channels[1] = (float*)malloc(a2_buffer.data_size * sizeof(float));
SLresult result;
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
const SLInterfaceID ids[] = {SL_IID_VOLUME};
const SLboolean req[] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 2,
SL_SAMPLINGRATE_44_1, SL_PCMSAMPLEFORMAT_FIXED_16,
SL_PCMSAMPLEFORMAT_FIXED_16, SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
SL_BYTEORDER_LITTLEENDIAN};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
const SLInterfaceID ids1[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE};
const SLboolean req1[] = {SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &(bqPlayerObject), &audioSrc, &audioSnk, 1, ids1, req1);
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &(bqPlayerPlay));
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &(bqPlayerBufferQueue));
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
memset(tempBuffer, 0, sizeof(tempBuffer));
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, tempBuffer, AUDIO_BUFFER_SIZE * 2);
}
void pauseAudio() {
if (bqPlayerPlay == NULL) {
return;
}
SLresult result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PAUSED);
}
void resumeAudio() {
if (bqPlayerPlay == NULL) {
return;
}
SLresult result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
}
void kinc_a2_update() {}
void kinc_a2_shutdown() {
if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
}
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
}
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
}
uint32_t kinc_a2_samples_per_second(void) {
return 44100;
}
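Note that copySample above scales with a plain multiply, so input outside [-1, 1] would overflow the int16 range. A hedged sketch of a saturating conversion (an alternative, not what this backend does):

#include <stdint.h>

// Clamp to [-1, 1] before scaling so out-of-range samples saturate
// instead of wrapping around.
static int16_t float_to_s16(float v) {
	if (v > 1.0f) v = 1.0f;
	if (v < -1.0f) v = -1.0f;
	return (int16_t)(v * 32767.0f);
}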


@@ -0,0 +1,106 @@
#include <kinc/backend/Android.h>
#include <kinc/display.h>
#include <kinc/log.h>
typedef struct {
bool available;
int x;
int y;
int width;
int height;
bool primary;
int number;
} kinc_display_t;
static kinc_display_t display;
int kinc_count_displays(void) {
return 1;
}
int kinc_primary_display(void) {
return 0;
}
static int width() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID getDisplayWidth = (*env)->GetStaticMethodID(env, koreActivityClass, "getDisplayWidth", "()I");
int width = (*env)->CallStaticIntMethod(env, koreActivityClass, getDisplayWidth);
(*vm)->DetachCurrentThread(vm);
return width;
}
static int height() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID getDisplayHeight = (*env)->GetStaticMethodID(env, koreActivityClass, "getDisplayHeight", "()I");
int height = (*env)->CallStaticIntMethod(env, koreActivityClass, getDisplayHeight);
(*vm)->DetachCurrentThread(vm);
return height;
}
static int pixelsPerInch() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID koreActivityGetScreenDpi = (*env)->GetStaticMethodID(env, koreActivityClass, "getScreenDpi", "()I");
int dpi = (*env)->CallStaticIntMethod(env, koreActivityClass, koreActivityGetScreenDpi);
(*vm)->DetachCurrentThread(vm);
return dpi;
}
static int refreshRate() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID getRefreshRate = (*env)->GetStaticMethodID(env, koreActivityClass, "getRefreshRate", "()I");
int rate = (*env)->CallStaticIntMethod(env, koreActivityClass, getRefreshRate);
(*vm)->DetachCurrentThread(vm);
return rate;
}
void kinc_display_init() {}
kinc_display_mode_t kinc_display_available_mode(int display_index, int mode_index) {
kinc_display_mode_t mode;
mode.x = 0;
mode.y = 0;
mode.width = width();
mode.height = height();
mode.frequency = refreshRate();
mode.bits_per_pixel = 32;
mode.pixels_per_inch = pixelsPerInch();
return mode;
}
int kinc_display_count_available_modes(int display_index) {
return 1;
}
kinc_display_mode_t kinc_display_current_mode(int display) {
kinc_display_mode_t mode;
mode.x = 0;
mode.y = 0;
mode.width = width();
mode.height = height();
mode.frequency = refreshRate();
mode.bits_per_pixel = 32;
mode.pixels_per_inch = pixelsPerInch();
return mode;
}
const char *kinc_display_name(int display) {
return "Display";
}
bool kinc_display_available(int display) {
return display == 0;
}
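As a usage sketch, the display query functions above can be combined like this (a minimal example against kinc/display.h and kinc/log.h):

#include <kinc/display.h>
#include <kinc/log.h>

// Log the current mode of the primary (and only) Android display.
static void log_primary_display(void) {
	int index = kinc_primary_display();
	kinc_display_mode_t mode = kinc_display_current_mode(index);
	kinc_log(KINC_LOG_LEVEL_INFO, "%s: %dx%d @ %d Hz, %d ppi", kinc_display_name(index), mode.width, mode.height, mode.frequency, mode.pixels_per_inch);
}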

File diff suppressed because it is too large.


@@ -0,0 +1,581 @@
#include <kinc/video.h>
#include <kinc/audio1/audio.h>
#include <kinc/graphics4/texture.h>
#include <kinc/io/filereader.h>
#include <kinc/log.h>
#include <kinc/system.h>
#include <android_native_app_glue.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>
#endif
#include <assert.h>
#include <jni.h>
#include <kinc/backend/Android.h>
#include <pthread.h>
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <android/native_window_jni.h>
#endif
void kinc_internal_video_sound_stream_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency) {
stream->bufferSize = 1;
stream->bufferReadPosition = 0;
stream->bufferWritePosition = 0;
stream->read = 0;
stream->written = 0;
}
void kinc_internal_video_sound_stream_destroy(kinc_internal_video_sound_stream_t *stream) {}
void kinc_internal_video_sound_stream_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count) {}
static float samples[2] = {0};
float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream) {
return samples;
}
bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream) {
return false;
}
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#define videosCount 10
static kinc_video_t *videos[videosCount] = {NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL};
#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf
#define NB_BUFFERS 8
#define MPEG2_TS_PACKET_SIZE 188
#define PACKETS_PER_BUFFER 10
#define BUFFER_SIZE (PACKETS_PER_BUFFER * MPEG2_TS_PACKET_SIZE)
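// For scale: each buffer holds PACKETS_PER_BUFFER * MPEG2_TS_PACKET_SIZE
// = 10 * 188 = 1880 bytes, so dataCache totals NB_BUFFERS * 1880 = 15040 bytes.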
static const int kEosBufferCntxt = 1980; // a magic value we can compare against
typedef struct kinc_android_video {
XAObjectItf engineObject;
XAEngineItf engineEngine;
XAObjectItf outputMixObject;
const char *path;
AAsset *file;
XAObjectItf playerObj;
XAPlayItf playerPlayItf;
XAAndroidBufferQueueItf playerBQItf;
XAStreamInformationItf playerStreamInfoItf;
XAVolumeItf playerVolItf;
char dataCache[BUFFER_SIZE * NB_BUFFERS];
ANativeWindow *theNativeWindow;
jboolean reachedEof;
pthread_mutex_t mutex;
pthread_cond_t cond;
bool discontinuity;
} kinc_android_video_t;
void kinc_android_video_init(kinc_android_video_t *video) {
video->engineObject = NULL;
video->engineEngine = NULL;
video->outputMixObject = NULL;
video->file = NULL;
video->playerObj = NULL;
video->playerPlayItf = NULL;
video->playerBQItf = NULL;
video->playerStreamInfoItf = NULL;
video->playerVolItf = NULL;
video->theNativeWindow = NULL;
video->reachedEof = JNI_FALSE;
memset(&video->mutex, 0, sizeof(video->mutex)); // mutex = PTHREAD_MUTEX_INITIALIZER; // simple assign stopped working in Android Studio 2.2
memset(&video->cond, 0, sizeof(video->cond)); // cond = PTHREAD_COND_INITIALIZER; // simple assign stopped working in Android Studio 2.2
video->discontinuity = false;
}
bool kinc_android_video_enqueue_initial_buffers(kinc_android_video_t *video, bool discontinuity) {
// Fill our cache.
// We want to read whole packets (integral multiples of MPEG2_TS_PACKET_SIZE).
// fread returns units of "elements" not bytes, so we ask for 1-byte elements
// and then check that the number of elements is a multiple of the packet size.
//
size_t bytesRead;
// bytesRead = fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file);
bytesRead = AAsset_read(video->file, video->dataCache, BUFFER_SIZE * NB_BUFFERS);
if (bytesRead <= 0) {
// could be premature EOF or I/O error
return false;
}
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
kinc_log(KINC_LOG_LEVEL_INFO, "Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
kinc_log(KINC_LOG_LEVEL_INFO, "Initially queueing %zu packets", packetsRead);
// Enqueue the content of our cache before starting to play,
// we don't want to starve the player
size_t i;
for (i = 0; i < NB_BUFFERS && packetsRead > 0; i++) {
// compute size of this buffer
size_t packetsThisBuffer = packetsRead;
if (packetsThisBuffer > PACKETS_PER_BUFFER) {
packetsThisBuffer = PACKETS_PER_BUFFER;
}
size_t bufferSize = packetsThisBuffer * MPEG2_TS_PACKET_SIZE;
XAresult res;
if (discontinuity) {
// signal discontinuity
XAAndroidBufferItem items[1];
items[0].itemKey = XA_ANDROID_ITEMKEY_DISCONTINUITY;
items[0].itemSize = 0;
// DISCONTINUITY message has no parameters,
// so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
res = (*video->playerBQItf)
->Enqueue(video->playerBQItf, NULL /*pBufferContext*/, video->dataCache + i * BUFFER_SIZE, bufferSize, items /*pMsg*/,
sizeof(XAuint32) * 2 /*msgLength*/);
discontinuity = JNI_FALSE;
}
else {
res = (*video->playerBQItf)->Enqueue(video->playerBQItf, NULL /*pBufferContext*/, video->dataCache + i * BUFFER_SIZE, bufferSize, NULL, 0);
}
assert(XA_RESULT_SUCCESS == res);
packetsRead -= packetsThisBuffer;
}
return true;
}
static XAresult AndroidBufferQueueCallback(XAAndroidBufferQueueItf caller, void *pCallbackContext, /* input */
void *pBufferContext, /* input */
void *pBufferData, /* input */
XAuint32 dataSize, /* input */
XAuint32 dataUsed, /* input */
const XAAndroidBufferItem *pItems, /* input */
XAuint32 itemsLength /* input */) {
kinc_android_video_t *self = (kinc_android_video_t *)pCallbackContext;
XAresult res;
int ok;
// pCallbackContext was specified as NULL at RegisterCallback and is unused here
// assert(NULL == pCallbackContext);
// note there is never any contention on this mutex unless a discontinuity request is active
ok = pthread_mutex_lock(&self->mutex);
assert(0 == ok);
// was a discontinuity requested?
if (self->discontinuity) {
// Note: can't rewind after EOS, which we send when reaching EOF
// (don't send EOS if you plan to play more content through the same player)
if (!self->reachedEof) {
// clear the buffer queue
res = (*self->playerBQItf)->Clear(self->playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// rewind the data source so we are guaranteed to be at an appropriate point
// rewind(file);
AAsset_seek(self->file, 0, SEEK_SET);
// Enqueue the initial buffers, with a discontinuity indicator on first buffer
kinc_android_video_enqueue_initial_buffers(self, JNI_TRUE);
}
// acknowledge the discontinuity request
self->discontinuity = JNI_FALSE;
ok = pthread_cond_signal(&self->cond);
assert(0 == ok);
goto exit;
}
if ((pBufferData == NULL) && (pBufferContext != NULL)) {
const int processedCommand = *(int *)pBufferContext;
if (kEosBufferCntxt == processedCommand) {
kinc_log(KINC_LOG_LEVEL_INFO, "EOS was processed");
// our buffer with the EOS message has been consumed
assert(0 == dataSize);
goto exit;
}
}
// pBufferData is a pointer to a buffer that we previously Enqueued
assert((dataSize > 0) && ((dataSize % MPEG2_TS_PACKET_SIZE) == 0));
assert(self->dataCache <= (char *)pBufferData && (char *)pBufferData < &self->dataCache[BUFFER_SIZE * NB_BUFFERS]);
assert(0 == (((char *)pBufferData - self->dataCache) % BUFFER_SIZE));
// don't bother trying to read more data once we've hit EOF
if (self->reachedEof) {
goto exit;
}
// note we do call fread from multiple threads, but never concurrently
size_t bytesRead;
// bytesRead = fread(pBufferData, 1, BUFFER_SIZE, file);
bytesRead = AAsset_read(self->file, pBufferData, BUFFER_SIZE);
if (bytesRead > 0) {
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
kinc_log(KINC_LOG_LEVEL_INFO, "Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
size_t bufferSize = packetsRead * MPEG2_TS_PACKET_SIZE;
res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/, pBufferData /*pData*/, bufferSize /*dataLength*/, NULL /*pMsg*/, 0 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
}
else {
// EOF or I/O error, signal EOS
XAAndroidBufferItem msgEos[1];
msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
msgEos[0].itemSize = 0;
// EOS message has no parameters, so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/, NULL /*pData*/, 0 /*dataLength*/, msgEos /*pMsg*/,
sizeof(XAuint32) * 2 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
self->reachedEof = JNI_TRUE;
}
exit:
ok = pthread_mutex_unlock(&self->mutex);
assert(0 == ok);
return XA_RESULT_SUCCESS;
}
static void StreamChangeCallback(XAStreamInformationItf caller, XAuint32 eventId, XAuint32 streamIndex, void *pEventData, void *pContext) {
kinc_log(KINC_LOG_LEVEL_INFO, "StreamChangeCallback called for stream %u", streamIndex);
kinc_android_video_t *self = (kinc_android_video_t *)pContext;
// pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
// assert(NULL == pContext);
switch (eventId) {
case XA_STREAMCBEVENT_PROPERTYCHANGE: {
// From spec 1.0.1:
// "This event indicates that stream property change has occurred.
// The streamIndex parameter identifies the stream with the property change.
// The pEventData parameter for this event is not used and shall be ignored."
//
XAresult res;
XAuint32 domain;
res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
assert(XA_RESULT_SUCCESS == res);
switch (domain) {
case XA_DOMAINTYPE_VIDEO: {
XAVideoStreamInformation videoInfo;
res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
assert(XA_RESULT_SUCCESS == res);
kinc_log(KINC_LOG_LEVEL_INFO, "Found video size %u x %u, codec ID=%u, frameRate=%u, bitRate=%u, duration=%u ms", videoInfo.width, videoInfo.height,
videoInfo.codecId, videoInfo.frameRate, videoInfo.bitRate, videoInfo.duration);
} break;
default:
kinc_log(KINC_LOG_LEVEL_ERROR, "Unexpected domain %u\n", domain);
break;
}
} break;
default:
kinc_log(KINC_LOG_LEVEL_ERROR, "Unexpected stream event ID %u\n", eventId);
break;
}
}
bool kinc_android_video_open(kinc_android_video_t *video, const char *filename) {
XAresult res;
// create engine
res = xaCreateEngine(&video->engineObject, 0, NULL, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the engine
res = (*video->engineObject)->Realize(video->engineObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the engine interface, which is needed in order to create other objects
res = (*video->engineObject)->GetInterface(video->engineObject, XA_IID_ENGINE, &video->engineEngine);
assert(XA_RESULT_SUCCESS == res);
// create output mix
res = (*video->engineEngine)->CreateOutputMix(video->engineEngine, &video->outputMixObject, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the output mix
res = (*video->outputMixObject)->Realize(video->outputMixObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// open the file to play
video->file = AAssetManager_open(kinc_android_get_asset_manager(), filename, AASSET_MODE_STREAMING);
if (video->file == NULL) {
kinc_log(KINC_LOG_LEVEL_INFO, "Could not find video file.");
return false;
}
// configure data source
XADataLocator_AndroidBufferQueue loc_abq = {XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS};
XADataFormat_MIME format_mime = {XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS};
XADataSource dataSrc = {&loc_abq, &format_mime};
// configure audio sink
XADataLocator_OutputMix loc_outmix = {XA_DATALOCATOR_OUTPUTMIX, video->outputMixObject};
XADataSink audioSnk = {&loc_outmix, NULL};
// configure image video sink
XADataLocator_NativeDisplay loc_nd = {
XA_DATALOCATOR_NATIVEDISPLAY, // locatorType
// the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
(void *)video->theNativeWindow, // hWindow
// must be NULL
NULL // hDisplay
};
XADataSink imageVideoSink = {&loc_nd, NULL};
// declare interfaces to use
XAboolean required[NB_MAXAL_INTERFACES] = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
XAInterfaceID iidArray[NB_MAXAL_INTERFACES] = {XA_IID_PLAY, XA_IID_ANDROIDBUFFERQUEUESOURCE, XA_IID_STREAMINFORMATION};
// create media player
res = (*video->engineEngine)
->CreateMediaPlayer(video->engineEngine, &video->playerObj, &dataSrc, NULL, &audioSnk, &imageVideoSink, NULL, NULL,
NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/, iidArray /*const XAInterfaceID *pInterfaceIds*/,
required /*const XAboolean *pInterfaceRequired*/);
assert(XA_RESULT_SUCCESS == res);
// realize the player
res = (*video->playerObj)->Realize(video->playerObj, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the play interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_PLAY, &video->playerPlayItf);
assert(XA_RESULT_SUCCESS == res);
// get the stream information interface (for video size)
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_STREAMINFORMATION, &video->playerStreamInfoItf);
assert(XA_RESULT_SUCCESS == res);
// get the volume interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_VOLUME, &video->playerVolItf);
assert(XA_RESULT_SUCCESS == res);
// get the Android buffer queue interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &video->playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// specify which events we want to be notified of
res = (*video->playerBQItf)->SetCallbackEventsMask(video->playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
assert(XA_RESULT_SUCCESS == res);
// register the callback from which OpenMAX AL can retrieve the data to play
res = (*video->playerBQItf)->RegisterCallback(video->playerBQItf, AndroidBufferQueueCallback, video);
assert(XA_RESULT_SUCCESS == res);
// we want to be notified of the video size once it's found, so we register a callback for that
res = (*video->playerStreamInfoItf)->RegisterStreamChangeCallback(video->playerStreamInfoItf, StreamChangeCallback, video);
assert(XA_RESULT_SUCCESS == res);
// enqueue the initial buffers
if (!kinc_android_video_enqueue_initial_buffers(video, false)) {
kinc_log(KINC_LOG_LEVEL_INFO, "Could not enqueue initial buffers for video decoding.");
return false;
}
// prepare the player
res = (*video->playerPlayItf)->SetPlayState(video->playerPlayItf, XA_PLAYSTATE_PAUSED);
assert(XA_RESULT_SUCCESS == res);
// set the volume
res = (*video->playerVolItf)->SetVolumeLevel(video->playerVolItf, 0);
assert(XA_RESULT_SUCCESS == res);
// start the playback
res = (*video->playerPlayItf)->SetPlayState(video->playerPlayItf, XA_PLAYSTATE_PLAYING);
assert(XA_RESULT_SUCCESS == res);
kinc_log(KINC_LOG_LEVEL_INFO, "Successfully loaded video.");
return true;
}
void kinc_android_video_shutdown(kinc_android_video_t *video) {
// destroy streaming media player object, and invalidate all associated interfaces
if (video->playerObj != NULL) {
(*video->playerObj)->Destroy(video->playerObj);
video->playerObj = NULL;
video->playerPlayItf = NULL;
video->playerBQItf = NULL;
video->playerStreamInfoItf = NULL;
video->playerVolItf = NULL;
}
// destroy output mix object, and invalidate all associated interfaces
if (video->outputMixObject != NULL) {
(*video->outputMixObject)->Destroy(video->outputMixObject);
video->outputMixObject = NULL;
}
// destroy engine object, and invalidate all associated interfaces
if (video->engineObject != NULL) {
(*video->engineObject)->Destroy(video->engineObject);
video->engineObject = NULL;
video->engineEngine = NULL;
}
// close the file
if (video->file != NULL) {
AAsset_close(video->file);
video->file = NULL;
}
// make sure we don't leak native windows
if (video->theNativeWindow != NULL) {
ANativeWindow_release(video->theNativeWindow);
video->theNativeWindow = NULL;
}
}
#endif
JNIEXPORT void JNICALL Java_tech_kinc_KincMoviePlayer_nativeCreate(JNIEnv *env, jobject jobj, jstring jpath, jobject surface, jint id) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
const char *path = (*env)->GetStringUTFChars(env, jpath, NULL);
kinc_android_video_t *av = malloc(sizeof *av);
kinc_android_video_init(av);
av->theNativeWindow = ANativeWindow_fromSurface(env, surface);
kinc_android_video_open(av, path);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] != NULL && videos[i]->impl.id == id) {
videos[i]->impl.androidVideo = av;
break;
}
}
(*env)->ReleaseStringUTFChars(env, jpath, path);
#endif
}
void KoreAndroidVideoInit() {
JNIEnv *env;
(*kinc_android_get_activity()->vm)->AttachCurrentThread(kinc_android_get_activity()->vm, &env, NULL);
jclass clazz = kinc_android_find_class(env, "tech.kinc.KincMoviePlayer");
// String path, Surface surface, int id
JNINativeMethod methodTable[] = {{"nativeCreate", "(Ljava/lang/String;Landroid/view/Surface;I)V", (void *)Java_tech_kinc_KincMoviePlayer_nativeCreate}};
int methodTableSize = sizeof(methodTable) / sizeof(methodTable[0]);
int failure = (*env)->RegisterNatives(env, clazz, methodTable, methodTableSize);
if (failure != 0) {
kinc_log(KINC_LOG_LEVEL_WARNING, "Failed to register KincMoviePlayer.nativeCreate");
}
(*kinc_android_get_activity()->vm)->DetachCurrentThread(kinc_android_get_activity()->vm);
}
void kinc_video_init(kinc_video_t *video, const char *filename) {
video->impl.playing = false;
video->impl.sound = NULL;
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_log(KINC_LOG_LEVEL_INFO, "Opening video %s.", filename);
video->impl.myWidth = 1023;
video->impl.myHeight = 684;
video->impl.next = 0;
video->impl.audioTime = 0;
JNIEnv *env = NULL;
(*kinc_android_get_activity()->vm)->AttachCurrentThread(kinc_android_get_activity()->vm, &env, NULL);
jclass koreMoviePlayerClass = kinc_android_find_class(env, "tech.kinc.KincMoviePlayer");
jmethodID constructor = (*env)->GetMethodID(env, koreMoviePlayerClass, "<init>", "(Ljava/lang/String;)V");
jobject object = (*env)->NewObject(env, koreMoviePlayerClass, constructor, (*env)->NewStringUTF(env, filename));
jmethodID getId = (*env)->GetMethodID(env, koreMoviePlayerClass, "getId", "()I");
video->impl.id = (*env)->CallIntMethod(env, object, getId);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] == NULL) {
videos[i] = video;
break;
}
}
jmethodID jinit = (*env)->GetMethodID(env, koreMoviePlayerClass, "init", "()V");
(*env)->CallVoidMethod(env, object, jinit);
jmethodID getTextureId = (*env)->GetMethodID(env, koreMoviePlayerClass, "getTextureId", "()I");
int texid = (*env)->CallIntMethod(env, object, getTextureId);
(*kinc_android_get_activity()->vm)->DetachCurrentThread(kinc_android_get_activity()->vm);
kinc_g4_texture_init_from_id(&video->impl.image, texid);
#endif
}
void kinc_video_destroy(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_video_stop(video);
kinc_android_video_t *av = (kinc_android_video_t *)video->impl.androidVideo;
kinc_android_video_shutdown(av);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] == video) {
videos[i] = NULL;
break;
}
}
#endif
}
void kinc_video_play(kinc_video_t *video, bool loop) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
video->impl.playing = true;
video->impl.start = kinc_time();
#endif
}
void kinc_video_pause(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
video->impl.playing = false;
#endif
}
void kinc_video_stop(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_video_pause(video);
#endif
}
void kinc_video_update(kinc_video_t *video, double time) {}
int kinc_video_width(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return video->impl.myWidth;
#else
return 512;
#endif
}
int kinc_video_height(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return video->impl.myHeight;
#else
return 512;
#endif
}
kinc_g4_texture_t *kinc_video_current_image(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return &video->impl.image;
#else
return NULL;
#endif
}
double kinc_video_duration(kinc_video_t *video) {
return 0.0;
}
double kinc_video_position(kinc_video_t *video) {
return 0.0;
}
bool kinc_video_finished(kinc_video_t *video) {
return false;
}
bool kinc_video_paused(kinc_video_t *video) {
return !video->impl.playing;
}
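For context, a sketch of how this API is driven from application code (a minimal example; the asset name is illustrative and error handling is omitted):

#include <kinc/video.h>

// Open an MPEG-2 TS asset, start playback, and tear everything down.
static void play_intro(void) {
	kinc_video_t video;
	kinc_video_init(&video, "intro.ts");
	kinc_video_play(&video, false);
	// per frame: draw kinc_video_current_image(&video) ...
	kinc_video_stop(&video);
	kinc_video_destroy(&video);
}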


@@ -0,0 +1,49 @@
#pragma once
#include <kinc/graphics4/texture.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef struct {
void *assetReader;
void *videoTrackOutput;
void *audioTrackOutput;
double start;
double next;
// double audioTime;
unsigned long long audioTime;
bool playing;
void *sound;
void *androidVideo;
int id;
kinc_g4_texture_t image;
double lastTime;
int myWidth;
int myHeight;
} kinc_video_impl_t;
typedef struct kinc_internal_video_sound_stream {
void *audioTrackOutput;
float *buffer;
int bufferSize;
int bufferWritePosition;
int bufferReadPosition;
uint64_t read;
uint64_t written;
} kinc_internal_video_sound_stream_t;
void kinc_internal_video_sound_stream_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency);
void kinc_internal_video_sound_stream_destroy(kinc_internal_video_sound_stream_t *stream);
void kinc_internal_video_sound_stream_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count);
float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream);
bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream);
#ifdef __cplusplus
}
#endif
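The read/write positions in kinc_internal_video_sound_stream imply a ring buffer. The Android backend stubs insert_data out, but a hypothetical insert consistent with these fields could look like this (a sketch, not the backend's behavior):

// Hypothetical ring-buffer insert matching the struct's fields.
static void sound_stream_insert(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count) {
	for (int i = 0; i < sample_count; ++i) {
		stream->buffer[stream->bufferWritePosition] = data[i];
		stream->bufferWritePosition = (stream->bufferWritePosition + 1) % stream->bufferSize;
		stream->written += 1;
	}
}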


@@ -0,0 +1,79 @@
#include <kinc/display.h>
#include <kinc/graphics4/graphics.h>
#include <kinc/window.h>
static void (*resizeCallback)(int x, int y, void *data) = NULL;
static void *resizeCallbackData = NULL;
int kinc_count_windows(void) {
return 1;
}
int kinc_window_x(int window_index) {
return 0;
}
int kinc_window_y(int window_index) {
return 0;
}
int kinc_android_width();
int kinc_window_width(int window_index) {
return kinc_android_width();
}
int kinc_android_height();
int kinc_window_height(int window_index) {
return kinc_android_height();
}
void kinc_window_resize(int window_index, int width, int height) {}
void kinc_window_move(int window_index, int x, int y) {}
void kinc_internal_change_framebuffer(int window, struct kinc_framebuffer_options *frame);
void kinc_window_change_framebuffer(int window_index, kinc_framebuffer_options_t *frame) {
kinc_internal_change_framebuffer(0, frame);
}
void kinc_window_change_features(int window_index, int features) {}
void kinc_window_change_mode(int window_index, kinc_window_mode_t mode) {}
void kinc_window_destroy(int window_index) {}
void kinc_window_show(int window_index) {}
void kinc_window_hide(int window_index) {}
void kinc_window_set_title(int window_index, const char *title) {}
int kinc_window_create(kinc_window_options_t *win, kinc_framebuffer_options_t *frame) {
return 0;
}
void kinc_window_set_resize_callback(int window_index, void (*callback)(int x, int y, void *data), void *data) {
resizeCallback = callback;
resizeCallbackData = data;
}
void kinc_internal_call_resize_callback(int window_index, int width, int height) {
if (resizeCallback != NULL) {
resizeCallback(width, height, resizeCallbackData);
}
}
void kinc_window_set_ppi_changed_callback(int window_index, void (*callback)(int ppi, void *data), void *data) {}
void kinc_window_set_close_callback(int window, bool (*callback)(void *), void *data) {}
kinc_window_mode_t kinc_window_get_mode(int window_index) {
return KINC_WINDOW_MODE_FULLSCREEN;
}
int kinc_window_display(int window) {
return 0;
}
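A brief usage sketch for the resize hook above (the callback body is illustrative):

#include <kinc/log.h>
#include <kinc/window.h>

static void on_resize(int width, int height, void *data) {
	kinc_log(KINC_LOG_LEVEL_INFO, "window resized to %dx%d", width, height);
}

// Register for resize events on the single Android window (index 0).
static void hook_resize(void) {
	kinc_window_set_resize_callback(0, on_resize, NULL);
}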


@@ -0,0 +1 @@
#pragma once