Update Files

This commit is contained in:
2025-01-22 16:18:30 +01:00
parent ed4603cf95
commit a36294b518
16718 changed files with 2960346 additions and 0 deletions

View File

@@ -0,0 +1,165 @@
package tech.kinc
import android.app.NativeActivity
import android.content.Context
import android.content.Intent
import android.content.pm.ApplicationInfo
import android.content.pm.PackageManager
import android.net.Uri
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.os.Vibrator
import android.os.VibrationEffect
import android.os.Build
import android.view.KeyEvent
import android.view.View
import android.view.WindowManager
import android.view.inputmethod.InputMethodManager
import kotlin.system.exitProcess
class KincActivity: NativeActivity(), KeyEvent.Callback {
companion object {
var instance: KincActivity? = null
@JvmStatic
fun showKeyboard() {
instance!!.inputManager!!.showSoftInput(instance!!.window.decorView, 0)
}
@JvmStatic
fun hideKeyboard() {
instance!!.inputManager!!.hideSoftInputFromWindow(instance!!.window.decorView.windowToken, 0)
instance!!.delayedHideSystemUI()
}
@JvmStatic
fun loadURL(url: String) {
val i = Intent(Intent.ACTION_VIEW, Uri.parse(url))
instance!!.startActivity(i)
}
@JvmStatic
fun getLanguage(): String {
return java.util.Locale.getDefault().language
}
@JvmStatic
fun vibrate(ms: Int) {
val v: Vibrator = instance!!.getSystemService(Context.VIBRATOR_SERVICE) as Vibrator
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
v.vibrate(VibrationEffect.createOneShot(ms.toLong(), VibrationEffect.DEFAULT_AMPLITUDE))
}
else {
// deprecated in API 26
v.vibrate(ms.toLong())
}
}
@JvmStatic
fun getRotation(): Int {
val context: Context = instance!!.applicationContext
val manager: WindowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
return manager.defaultDisplay.rotation
}
@JvmStatic
fun getScreenDpi(): Int {
val context: Context = instance!!.applicationContext
val manager: WindowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val metrics: android.util.DisplayMetrics = android.util.DisplayMetrics()
manager.defaultDisplay.getMetrics(metrics)
return metrics.xdpi.toInt()
}
@JvmStatic
fun getRefreshRate(): Int {
val context: Context = instance!!.applicationContext
val manager: WindowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
return manager.defaultDisplay.refreshRate.toInt()
}
@JvmStatic
fun getDisplayWidth(): Int {
val context: Context = instance!!.applicationContext
val manager: WindowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val size: android.graphics.Point = android.graphics.Point()
manager.defaultDisplay.getRealSize(size)
return size.x
}
@JvmStatic
fun getDisplayHeight(): Int {
val context: Context = instance!!.applicationContext
val manager: WindowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
val size: android.graphics.Point = android.graphics.Point()
manager.defaultDisplay.getRealSize(size)
return size.y
}
@JvmStatic
fun stop() {
instance!!.runOnUiThread {
instance!!.finish()
exitProcess(0)
}
}
class MyHandler(private val kincActivity: KincActivity) : Handler() {
override fun handleMessage(msg: Message) {
kincActivity.hideSystemUI()
}
}
}
var inputManager: InputMethodManager? = null
private var isDisabledStickyImmersiveMode = false
private val hideSystemUIHandler = MyHandler(this)
override fun onCreate(state: Bundle?) {
super.onCreate(state)
hideSystemUI()
instance = this
inputManager = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
isDisabledStickyImmersiveMode = try {
val ai: ApplicationInfo = packageManager.getApplicationInfo(packageName, PackageManager.GET_META_DATA)
val bundle: Bundle = ai.metaData
bundle.getBoolean("disableStickyImmersiveMode")
} catch (e: PackageManager.NameNotFoundException) {
false
} catch (e: NullPointerException) {
false
}
}
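// The flag above is read from the application's manifest meta-data; a minimal
// AndroidManifest.xml entry would look like (sketch):
// <meta-data android:name="disableStickyImmersiveMode" android:value="true" />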
private fun hideSystemUI() {
window.decorView.systemUiVisibility = View.SYSTEM_UI_FLAG_LAYOUT_STABLE or View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION or View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN or View.SYSTEM_UI_FLAG_HIDE_NAVIGATION or View.SYSTEM_UI_FLAG_FULLSCREEN or View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
}
private fun delayedHideSystemUI() {
hideSystemUIHandler.removeMessages(0)
if (!isDisabledStickyImmersiveMode) {
hideSystemUIHandler.sendEmptyMessageDelayed(0, 300)
}
}
override fun onWindowFocusChanged(hasFocus: Boolean) {
super.onWindowFocusChanged(hasFocus)
if (hasFocus) {
delayedHideSystemUI()
}
else {
hideSystemUIHandler.removeMessages(0)
}
}
override fun onKeyMultiple(keyCode: Int, count: Int, event: KeyEvent): Boolean {
this.nativeKincKeyPress(event.characters)
return false
}
private external fun nativeKincKeyPress(chars: String)
}

View File

@@ -0,0 +1,50 @@
package tech.kinc
import java.util.ArrayList
import android.view.Surface
class KincMoviePlayer(var path: String) {
companion object {
var players = ArrayList<KincMoviePlayer?>()
@JvmStatic
fun updateAll() {
for (player in KincMoviePlayer.players) {
player!!.update()
}
}
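// remove() nulls the slot instead of shrinking the list, so the remaining
// players keep their id (id doubles as the index into players).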
fun remove(id: Int) {
players[id] = null
}
}
private var movieTexture: KincMovieTexture? = null
var id: Int = players.size
init {
players.add(this)
}
fun init() {
movieTexture = KincMovieTexture()
val surface = Surface(movieTexture!!.surfaceTexture)
nativeCreate(path, surface, id)
surface.release()
}
fun getMovieTexture(): KincMovieTexture? {
return movieTexture
}
fun update(): Boolean {
return movieTexture!!.update()
}
fun getTextureId(): Int {
return movieTexture!!.textureId
}
private external fun nativeCreate(path: String, surface: Surface, id: Int)
}

View File

@@ -0,0 +1,62 @@
package tech.kinc
import android.graphics.SurfaceTexture
import android.graphics.SurfaceTexture.OnFrameAvailableListener
import android.opengl.GLES20
class KincMovieTexture: OnFrameAvailableListener {
private val GL_TEXTURE_EXTERNAL_OES: Int = 0x8D65
var textureId: Int = 0
init {
val textures = IntArray(1)
GLES20.glGenTextures(1, textures, 0)
textureId = textures[0]
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId)
GLES20.glTexParameteri(
GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST
)
GLES20.glTexParameteri(
GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR
)
GLES20.glTexParameteri(
GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE
)
GLES20.glTexParameteri(
GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE
)
}
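// Note: a GL_TEXTURE_EXTERNAL_OES texture can only be sampled through the
// external-image extension; a fragment shader sketch:
//   #extension GL_OES_EGL_image_external : require
//   uniform samplerExternalOES movieTexture;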
var surfaceTexture = SurfaceTexture(textureId)
init {
surfaceTexture.setOnFrameAvailableListener(this)
}
private var updateTexture = false
fun update(): Boolean {
val ret = updateTexture
if (updateTexture) {
surfaceTexture.updateTexImage()
updateTexture = false
}
return ret
}
override fun onFrameAvailable(surface: SurfaceTexture) {
if (surfaceTexture == surface) {
updateTexture = true
}
}
}

View File

@@ -0,0 +1,441 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <jni.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/resource.h>
#include "android_native_app_glue.h"
#include <android/log.h>
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "threaded_app", __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "threaded_app", __VA_ARGS__))
/* For debug builds, always enable the debug traces in this library */
#ifndef NDEBUG
# define LOGV(...) ((void)__android_log_print(ANDROID_LOG_VERBOSE, "threaded_app", __VA_ARGS__))
#else
# define LOGV(...) ((void)0)
#endif
static void free_saved_state(struct android_app* android_app) {
pthread_mutex_lock(&android_app->mutex);
if (android_app->savedState != NULL) {
free(android_app->savedState);
android_app->savedState = NULL;
android_app->savedStateSize = 0;
}
pthread_mutex_unlock(&android_app->mutex);
}
int8_t android_app_read_cmd(struct android_app* android_app) {
int8_t cmd;
if (read(android_app->msgread, &cmd, sizeof(cmd)) == sizeof(cmd)) {
switch (cmd) {
case APP_CMD_SAVE_STATE:
free_saved_state(android_app);
break;
}
return cmd;
} else {
LOGE("No data on command pipe!");
}
return -1;
}
static void print_cur_config(struct android_app* android_app) {
char lang[2], country[2];
AConfiguration_getLanguage(android_app->config, lang);
AConfiguration_getCountry(android_app->config, country);
LOGV("Config: mcc=%d mnc=%d lang=%c%c cnt=%c%c orien=%d touch=%d dens=%d "
"keys=%d nav=%d keysHid=%d navHid=%d sdk=%d size=%d long=%d "
"modetype=%d modenight=%d",
AConfiguration_getMcc(android_app->config),
AConfiguration_getMnc(android_app->config),
lang[0], lang[1], country[0], country[1],
AConfiguration_getOrientation(android_app->config),
AConfiguration_getTouchscreen(android_app->config),
AConfiguration_getDensity(android_app->config),
AConfiguration_getKeyboard(android_app->config),
AConfiguration_getNavigation(android_app->config),
AConfiguration_getKeysHidden(android_app->config),
AConfiguration_getNavHidden(android_app->config),
AConfiguration_getSdkVersion(android_app->config),
AConfiguration_getScreenSize(android_app->config),
AConfiguration_getScreenLong(android_app->config),
AConfiguration_getUiModeType(android_app->config),
AConfiguration_getUiModeNight(android_app->config));
}
void android_app_pre_exec_cmd(struct android_app* android_app, int8_t cmd) {
switch (cmd) {
case APP_CMD_INPUT_CHANGED:
LOGV("APP_CMD_INPUT_CHANGED\n");
pthread_mutex_lock(&android_app->mutex);
if (android_app->inputQueue != NULL) {
AInputQueue_detachLooper(android_app->inputQueue);
}
android_app->inputQueue = android_app->pendingInputQueue;
if (android_app->inputQueue != NULL) {
LOGV("Attaching input queue to looper");
AInputQueue_attachLooper(android_app->inputQueue,
android_app->looper, LOOPER_ID_INPUT, NULL,
&android_app->inputPollSource);
}
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
break;
case APP_CMD_INIT_WINDOW:
LOGV("APP_CMD_INIT_WINDOW\n");
pthread_mutex_lock(&android_app->mutex);
android_app->window = android_app->pendingWindow;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
break;
case APP_CMD_TERM_WINDOW:
LOGV("APP_CMD_TERM_WINDOW\n");
pthread_cond_broadcast(&android_app->cond);
break;
case APP_CMD_RESUME:
case APP_CMD_START:
case APP_CMD_PAUSE:
case APP_CMD_STOP:
LOGV("activityState=%d\n", cmd);
pthread_mutex_lock(&android_app->mutex);
android_app->activityState = cmd;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
break;
case APP_CMD_CONFIG_CHANGED:
LOGV("APP_CMD_CONFIG_CHANGED\n");
AConfiguration_fromAssetManager(android_app->config,
android_app->activity->assetManager);
print_cur_config(android_app);
break;
case APP_CMD_DESTROY:
LOGV("APP_CMD_DESTROY\n");
android_app->destroyRequested = 1;
break;
}
}
void android_app_post_exec_cmd(struct android_app* android_app, int8_t cmd) {
switch (cmd) {
case APP_CMD_TERM_WINDOW:
LOGV("APP_CMD_TERM_WINDOW\n");
pthread_mutex_lock(&android_app->mutex);
android_app->window = NULL;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
break;
case APP_CMD_SAVE_STATE:
LOGV("APP_CMD_SAVE_STATE\n");
pthread_mutex_lock(&android_app->mutex);
android_app->stateSaved = 1;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
break;
case APP_CMD_RESUME:
free_saved_state(android_app);
break;
}
}
void app_dummy() {
}
static void android_app_destroy(struct android_app* android_app) {
LOGV("android_app_destroy!");
free_saved_state(android_app);
pthread_mutex_lock(&android_app->mutex);
if (android_app->inputQueue != NULL) {
AInputQueue_detachLooper(android_app->inputQueue);
}
AConfiguration_delete(android_app->config);
android_app->destroyed = 1;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
// Can't touch android_app object after this.
}
static void process_input(struct android_app* app, struct android_poll_source* source) {
AInputEvent* event = NULL;
while (AInputQueue_getEvent(app->inputQueue, &event) >= 0) {
LOGV("New input event: type=%d\n", AInputEvent_getType(event));
if (AInputQueue_preDispatchEvent(app->inputQueue, event)) {
continue;
}
int32_t handled = 0;
if (app->onInputEvent != NULL) handled = app->onInputEvent(app, event);
AInputQueue_finishEvent(app->inputQueue, event, handled);
}
}
static void process_cmd(struct android_app* app, struct android_poll_source* source) {
int8_t cmd = android_app_read_cmd(app);
android_app_pre_exec_cmd(app, cmd);
if (app->onAppCmd != NULL) app->onAppCmd(app, cmd);
android_app_post_exec_cmd(app, cmd);
}
static void* android_app_entry(void* param) {
struct android_app* android_app = (struct android_app*)param;
android_app->config = AConfiguration_new();
AConfiguration_fromAssetManager(android_app->config, android_app->activity->assetManager);
print_cur_config(android_app);
android_app->cmdPollSource.id = LOOPER_ID_MAIN;
android_app->cmdPollSource.app = android_app;
android_app->cmdPollSource.process = process_cmd;
android_app->inputPollSource.id = LOOPER_ID_INPUT;
android_app->inputPollSource.app = android_app;
android_app->inputPollSource.process = process_input;
ALooper* looper = ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
ALooper_addFd(looper, android_app->msgread, LOOPER_ID_MAIN, ALOOPER_EVENT_INPUT, NULL,
&android_app->cmdPollSource);
android_app->looper = looper;
pthread_mutex_lock(&android_app->mutex);
android_app->running = 1;
pthread_cond_broadcast(&android_app->cond);
pthread_mutex_unlock(&android_app->mutex);
android_main(android_app);
android_app_destroy(android_app);
return NULL;
}
// --------------------------------------------------------------------
// Native activity interaction (called from main thread)
// --------------------------------------------------------------------
static struct android_app* android_app_create(ANativeActivity* activity,
void* savedState, size_t savedStateSize) {
struct android_app* android_app = (struct android_app*)malloc(sizeof(struct android_app));
memset(android_app, 0, sizeof(struct android_app));
android_app->activity = activity;
pthread_mutex_init(&android_app->mutex, NULL);
pthread_cond_init(&android_app->cond, NULL);
if (savedState != NULL) {
android_app->savedState = malloc(savedStateSize);
android_app->savedStateSize = savedStateSize;
memcpy(android_app->savedState, savedState, savedStateSize);
}
int msgpipe[2];
if (pipe(msgpipe)) {
LOGE("could not create pipe: %s", strerror(errno));
return NULL;
}
android_app->msgread = msgpipe[0];
android_app->msgwrite = msgpipe[1];
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
pthread_create(&android_app->thread, &attr, android_app_entry, android_app);
// Wait for thread to start.
pthread_mutex_lock(&android_app->mutex);
while (!android_app->running) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
pthread_mutex_unlock(&android_app->mutex);
return android_app;
}
static void android_app_write_cmd(struct android_app* android_app, int8_t cmd) {
if (write(android_app->msgwrite, &cmd, sizeof(cmd)) != sizeof(cmd)) {
LOGE("Failure writing android_app cmd: %s\n", strerror(errno));
}
}
static void android_app_set_input(struct android_app* android_app, AInputQueue* inputQueue) {
pthread_mutex_lock(&android_app->mutex);
android_app->pendingInputQueue = inputQueue;
android_app_write_cmd(android_app, APP_CMD_INPUT_CHANGED);
while (android_app->inputQueue != android_app->pendingInputQueue) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
pthread_mutex_unlock(&android_app->mutex);
}
static void android_app_set_window(struct android_app* android_app, ANativeWindow* window) {
pthread_mutex_lock(&android_app->mutex);
if (android_app->pendingWindow != NULL) {
android_app_write_cmd(android_app, APP_CMD_TERM_WINDOW);
}
android_app->pendingWindow = window;
if (window != NULL) {
android_app_write_cmd(android_app, APP_CMD_INIT_WINDOW);
}
while (android_app->window != android_app->pendingWindow) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
pthread_mutex_unlock(&android_app->mutex);
}
static void android_app_set_activity_state(struct android_app* android_app, int8_t cmd) {
pthread_mutex_lock(&android_app->mutex);
android_app_write_cmd(android_app, cmd);
while (android_app->activityState != cmd) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
pthread_mutex_unlock(&android_app->mutex);
}
static void android_app_free(struct android_app* android_app) {
pthread_mutex_lock(&android_app->mutex);
android_app_write_cmd(android_app, APP_CMD_DESTROY);
while (!android_app->destroyed) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
pthread_mutex_unlock(&android_app->mutex);
close(android_app->msgread);
close(android_app->msgwrite);
pthread_cond_destroy(&android_app->cond);
pthread_mutex_destroy(&android_app->mutex);
free(android_app);
}
static void onDestroy(ANativeActivity* activity) {
LOGV("Destroy: %p\n", activity);
android_app_free((struct android_app*)activity->instance);
}
static void onStart(ANativeActivity* activity) {
LOGV("Start: %p\n", activity);
android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_START);
}
static void onResume(ANativeActivity* activity) {
LOGV("Resume: %p\n", activity);
android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_RESUME);
}
static void* onSaveInstanceState(ANativeActivity* activity, size_t* outLen) {
struct android_app* android_app = (struct android_app*)activity->instance;
void* savedState = NULL;
LOGV("SaveInstanceState: %p\n", activity);
pthread_mutex_lock(&android_app->mutex);
android_app->stateSaved = 0;
android_app_write_cmd(android_app, APP_CMD_SAVE_STATE);
while (!android_app->stateSaved) {
pthread_cond_wait(&android_app->cond, &android_app->mutex);
}
if (android_app->savedState != NULL) {
savedState = android_app->savedState;
*outLen = android_app->savedStateSize;
android_app->savedState = NULL;
android_app->savedStateSize = 0;
}
pthread_mutex_unlock(&android_app->mutex);
return savedState;
}
static void onPause(ANativeActivity* activity) {
LOGV("Pause: %p\n", activity);
android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_PAUSE);
}
static void onStop(ANativeActivity* activity) {
LOGV("Stop: %p\n", activity);
android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_STOP);
}
static void onConfigurationChanged(ANativeActivity* activity) {
struct android_app* android_app = (struct android_app*)activity->instance;
LOGV("ConfigurationChanged: %p\n", activity);
android_app_write_cmd(android_app, APP_CMD_CONFIG_CHANGED);
}
static void onLowMemory(ANativeActivity* activity) {
struct android_app* android_app = (struct android_app*)activity->instance;
LOGV("LowMemory: %p\n", activity);
android_app_write_cmd(android_app, APP_CMD_LOW_MEMORY);
}
static void onWindowFocusChanged(ANativeActivity* activity, int focused) {
LOGV("WindowFocusChanged: %p -- %d\n", activity, focused);
android_app_write_cmd((struct android_app*)activity->instance,
focused ? APP_CMD_GAINED_FOCUS : APP_CMD_LOST_FOCUS);
}
static void onNativeWindowCreated(ANativeActivity* activity, ANativeWindow* window) {
LOGV("NativeWindowCreated: %p -- %p\n", activity, window);
android_app_set_window((struct android_app*)activity->instance, window);
}
static void onNativeWindowDestroyed(ANativeActivity* activity, ANativeWindow* window) {
LOGV("NativeWindowDestroyed: %p -- %p\n", activity, window);
android_app_set_window((struct android_app*)activity->instance, NULL);
}
static void onInputQueueCreated(ANativeActivity* activity, AInputQueue* queue) {
LOGV("InputQueueCreated: %p -- %p\n", activity, queue);
android_app_set_input((struct android_app*)activity->instance, queue);
}
static void onInputQueueDestroyed(ANativeActivity* activity, AInputQueue* queue) {
LOGV("InputQueueDestroyed: %p -- %p\n", activity, queue);
android_app_set_input((struct android_app*)activity->instance, NULL);
}
void ANativeActivity_onCreate(ANativeActivity* activity,
void* savedState, size_t savedStateSize) {
LOGV("Creating: %p\n", activity);
activity->callbacks->onDestroy = onDestroy;
activity->callbacks->onStart = onStart;
activity->callbacks->onResume = onResume;
activity->callbacks->onSaveInstanceState = onSaveInstanceState;
activity->callbacks->onPause = onPause;
activity->callbacks->onStop = onStop;
activity->callbacks->onConfigurationChanged = onConfigurationChanged;
activity->callbacks->onLowMemory = onLowMemory;
activity->callbacks->onWindowFocusChanged = onWindowFocusChanged;
activity->callbacks->onNativeWindowCreated = onNativeWindowCreated;
activity->callbacks->onNativeWindowDestroyed = onNativeWindowDestroyed;
activity->callbacks->onInputQueueCreated = onInputQueueCreated;
activity->callbacks->onInputQueueDestroyed = onInputQueueDestroyed;
activity->instance = android_app_create(activity, savedState, savedStateSize);
}

View File

@@ -0,0 +1,349 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef _ANDROID_NATIVE_APP_GLUE_H
#define _ANDROID_NATIVE_APP_GLUE_H
#include <poll.h>
#include <pthread.h>
#include <sched.h>
#include <android/configuration.h>
#include <android/looper.h>
#include <android/native_activity.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* The native activity interface provided by <android/native_activity.h>
* is based on a set of application-provided callbacks that will be called
* by the Activity's main thread when certain events occur.
*
* This means that each one of these callbacks _should_ _not_ block, or they
* risk having the system force-close the application. This programming
* model is direct, lightweight, but constraining.
*
* The 'android_native_app_glue' static library is used to provide a different
* execution model where the application can implement its own main event
* loop in a different thread instead. Here's how it works:
*
* 1/ The application must provide a function named "android_main()" that
* will be called when the activity is created, in a new thread that is
* distinct from the activity's main thread.
*
* 2/ android_main() receives a pointer to a valid "android_app" structure
* that contains references to other important objects, e.g. the
* ANativeActivity object instance the application is running in.
*
* 3/ the "android_app" object holds an ALooper instance that already
* listens to two important things:
*
* - activity lifecycle events (e.g. "pause", "resume"). See APP_CMD_XXX
* declarations below.
*
* - input events coming from the AInputQueue attached to the activity.
*
* Each of these corresponds to an ALooper identifier returned by
* ALooper_pollOnce with values of LOOPER_ID_MAIN and LOOPER_ID_INPUT,
* respectively.
*
* Your application can use the same ALooper to listen to additional
* file-descriptors. They can either be callback-based, or return
* identifiers starting at LOOPER_ID_USER.
*
* 4/ Whenever you receive a LOOPER_ID_MAIN or LOOPER_ID_INPUT event,
* the returned data will point to an android_poll_source structure. You
* can call the process() function on it, and fill in android_app->onAppCmd
* and android_app->onInputEvent to be called for your own processing
* of the event.
*
* Alternatively, you can call the low-level functions to read and process
* the data directly... look at the process_cmd() and process_input()
* implementations in the glue to see how to do this.
*
* See the sample named "native-activity" that comes with the NDK with a
* full usage example. Also look at the JavaDoc of NativeActivity.
*/
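/**
 * For orientation, a minimal android_main() event loop sketch (illustrative
 * only; handle_cmd and handle_input are placeholder names for handlers you
 * supply yourself):
 *
 *     void android_main(struct android_app* app) {
 *         app->onAppCmd = handle_cmd;
 *         app->onInputEvent = handle_input;
 *         while (!app->destroyRequested) {
 *             int events;
 *             struct android_poll_source* source;
 *             // a timeout of -1 blocks until an event arrives; use 0 when animating
 *             while (ALooper_pollAll(-1, NULL, &events, (void**)&source) >= 0) {
 *                 if (source != NULL) source->process(app, source);
 *                 if (app->destroyRequested) break;
 *             }
 *         }
 *     }
 */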
struct android_app;
/**
* Data associated with an ALooper fd that will be returned as the "outData"
* when that source has data ready.
*/
struct android_poll_source {
// The identifier of this source. May be LOOPER_ID_MAIN or
// LOOPER_ID_INPUT.
int32_t id;
// The android_app this ident is associated with.
struct android_app* app;
// Function to call to perform the standard processing of data from
// this source.
void (*process)(struct android_app* app, struct android_poll_source* source);
};
/**
* This is the interface for the standard glue code of a threaded
* application. In this model, the application's code is running
* in its own thread separate from the main thread of the process.
* It is not required that this thread be associated with the Java
* VM, although it will need to be in order to make JNI calls to any
* Java objects.
*/
struct android_app {
// The application can place a pointer to its own state object
// here if it likes.
void* userData;
// Fill this in with the function to process main app commands (APP_CMD_*)
void (*onAppCmd)(struct android_app* app, int32_t cmd);
// Fill this in with the function to process input events. At this point
// the event has already been pre-dispatched, and it will be finished upon
// return. Return 1 if you have handled the event, 0 for any default
// dispatching.
int32_t (*onInputEvent)(struct android_app* app, AInputEvent* event);
// The ANativeActivity object instance that this app is running in.
ANativeActivity* activity;
// The current configuration the app is running in.
AConfiguration* config;
// This is the last instance's saved state, as provided at creation time.
// It is NULL if there was no state. You can use this as you need; the
// memory will remain around until you call android_app_exec_cmd() for
// APP_CMD_RESUME, at which point it will be freed and savedState set to NULL.
// These variables should only be changed when processing an APP_CMD_SAVE_STATE,
// at which point they will be initialized to NULL and you can malloc your
// state and place the information here. In that case the memory will be
// freed for you later.
void* savedState;
size_t savedStateSize;
// The ALooper associated with the app's thread.
ALooper* looper;
// When non-NULL, this is the input queue from which the app will
// receive user input events.
AInputQueue* inputQueue;
// When non-NULL, this is the window surface that the app can draw in.
ANativeWindow* window;
// Current content rectangle of the window; this is the area where the
// window's content should be placed to be seen by the user.
ARect contentRect;
// Current state of the app's activity. May be either APP_CMD_START,
// APP_CMD_RESUME, APP_CMD_PAUSE, or APP_CMD_STOP; see below.
int activityState;
// This is non-zero when the application's NativeActivity is being
// destroyed and waiting for the app thread to complete.
int destroyRequested;
// -------------------------------------------------
// Below are "private" implementation of the glue code.
pthread_mutex_t mutex;
pthread_cond_t cond;
int msgread;
int msgwrite;
pthread_t thread;
struct android_poll_source cmdPollSource;
struct android_poll_source inputPollSource;
int running;
int stateSaved;
int destroyed;
int redrawNeeded;
AInputQueue* pendingInputQueue;
ANativeWindow* pendingWindow;
ARect pendingContentRect;
};
enum {
/**
* Looper data ID of commands coming from the app's main thread, which
* is returned as an identifier from ALooper_pollOnce(). The data for this
* identifier is a pointer to an android_poll_source structure.
* These can be retrieved and processed with android_app_read_cmd()
* and android_app_exec_cmd().
*/
LOOPER_ID_MAIN = 1,
/**
* Looper data ID of events coming from the AInputQueue of the
* application's window, which is returned as an identifier from
* ALooper_pollOnce(). The data for this identifier is a pointer to an
* android_poll_source structure. These can be read via the inputQueue
* object of android_app.
*/
LOOPER_ID_INPUT = 2,
/**
* Start of user-defined ALooper identifiers.
*/
LOOPER_ID_USER = 3,
};
enum {
/**
* Command from main thread: the AInputQueue has changed. Upon processing
* this command, android_app->inputQueue will be updated to the new queue
* (or NULL).
*/
APP_CMD_INPUT_CHANGED,
/**
* Command from main thread: a new ANativeWindow is ready for use. Upon
* receiving this command, android_app->window will contain the new window
* surface.
*/
APP_CMD_INIT_WINDOW,
/**
* Command from main thread: the existing ANativeWindow needs to be
* terminated. Upon receiving this command, android_app->window still
* contains the existing window; after calling android_app_exec_cmd
* it will be set to NULL.
*/
APP_CMD_TERM_WINDOW,
/**
* Command from main thread: the current ANativeWindow has been resized.
* Please redraw with its new size.
*/
APP_CMD_WINDOW_RESIZED,
/**
* Command from main thread: the system requires that the current ANativeWindow
* be redrawn. You should redraw the window before handing this to
* android_app_exec_cmd() in order to avoid transient drawing glitches.
*/
APP_CMD_WINDOW_REDRAW_NEEDED,
/**
* Command from main thread: the content area of the window has changed,
* such as from the soft input window being shown or hidden. You can
* find the new content rect in android_app::contentRect.
*/
APP_CMD_CONTENT_RECT_CHANGED,
/**
* Command from main thread: the app's activity window has gained
* input focus.
*/
APP_CMD_GAINED_FOCUS,
/**
* Command from main thread: the app's activity window has lost
* input focus.
*/
APP_CMD_LOST_FOCUS,
/**
* Command from main thread: the current device configuration has changed.
*/
APP_CMD_CONFIG_CHANGED,
/**
* Command from main thread: the system is running low on memory.
* Try to reduce your memory use.
*/
APP_CMD_LOW_MEMORY,
/**
* Command from main thread: the app's activity has been started.
*/
APP_CMD_START,
/**
* Command from main thread: the app's activity has been resumed.
*/
APP_CMD_RESUME,
/**
* Command from main thread: the app should generate a new saved state
* for itself, to restore from later if needed. If you have saved state,
* allocate it with malloc and place it in android_app.savedState with
* the size in android_app.savedStateSize. It will be freed for you
* later.
*/
APP_CMD_SAVE_STATE,
/**
* Command from main thread: the app's activity has been paused.
*/
APP_CMD_PAUSE,
/**
* Command from main thread: the app's activity has been stopped.
*/
APP_CMD_STOP,
/**
* Command from main thread: the app's activity is being destroyed,
* and waiting for the app thread to clean up and exit before proceeding.
*/
APP_CMD_DESTROY,
};
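/**
 * A sketch of an onAppCmd handler following the savedState protocol described
 * above (handle_cmd, saved_state_t, and current_state are placeholder names,
 * not part of the glue):
 *
 *     static void handle_cmd(struct android_app* app, int32_t cmd) {
 *         switch (cmd) {
 *         case APP_CMD_SAVE_STATE:
 *             app->savedState = malloc(sizeof(saved_state_t));
 *             *(saved_state_t*)app->savedState = current_state;
 *             app->savedStateSize = sizeof(saved_state_t);
 *             break;
 *         case APP_CMD_INIT_WINDOW:
 *             // app->window is now valid, e.g. create the EGL surface here
 *             break;
 *         case APP_CMD_TERM_WINDOW:
 *             // app->window is about to go away, release the surface
 *             break;
 *         }
 *     }
 */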
/**
* Call when ALooper_pollAll() returns LOOPER_ID_MAIN, reading the next
* app command message.
*/
int8_t android_app_read_cmd(struct android_app* android_app);
/**
* Call with the command returned by android_app_read_cmd() to do the
* initial pre-processing of the given command. You can perform your own
* actions for the command after calling this function.
*/
void android_app_pre_exec_cmd(struct android_app* android_app, int8_t cmd);
/**
* Call with the command returned by android_app_read_cmd() to do the
* final post-processing of the given command. You must have done your own
* actions for the command before calling this function.
*/
void android_app_post_exec_cmd(struct android_app* android_app, int8_t cmd);
/**
* Dummy function you can call to ensure glue code isn't stripped.
*/
void app_dummy();
/**
* This is the function that application code must implement, representing
* the main entry to the app.
*/
extern void android_main(struct android_app* app);
#ifdef __cplusplus
}
#endif
#endif /* _ANDROID_NATIVE_APP_GLUE_H */

View File

@@ -0,0 +1,18 @@
#pragma once
#include <android_native_app_glue.h>
#ifdef __cplusplus
extern "C" {
#endif
// class name in usual Java syntax (dots, no slashes)
jclass kinc_android_find_class(JNIEnv *env, const char *name);
ANativeActivity *kinc_android_get_activity(void);
AAssetManager *kinc_android_get_asset_manager(void);
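// Usage sketch (note the dots):
//   jclass activityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");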
#ifdef __cplusplus
}
#endif

View File

@@ -0,0 +1,385 @@
#if 0
#include "VrInterface.h"
#ifdef VR_GEAR_VR
#include <kha/Image.h>
#include <kha/math/Matrix4.h>
#include <kha/math/Quaternion.h>
#include <kha/math/Vector3.h>
#include <kha/vr/Pose.h>
#include <kha/vr/PoseState.h>
#include <kha/vr/TimeWarpImage.h>
#include <GlTexture.h>
#include <VrApi/VrApi.h>
#include <VrApi/VrApi_Helpers.h>
#include <LibOvr/Src/Kernel/OVR_Math.h>
#include <Kore/log.h>
#endif
namespace Kore {
//
namespace VrInterface {
// Set during Initialize.
#ifdef VR_GEAR_VR
static ovrMobile* ovr;
#endif
static JavaVM* cachedJVM;
static jobject instance;
static jclass koreActivity;
static float qx;
static float qy;
static float qz;
static float qw;
//
void SetJVM(JavaVM* jvm) {
cachedJVM = jvm;
// Grab the activity object
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
koreActivity = env->FindClass("tech/kode/kore/KoreActivity");
koreActivity = (jclass) env->NewGlobalRef(koreActivity);
jmethodID mid = env->GetStaticMethodID(koreActivity, "getInstance", "()Ltech/kode/kore/KoreActivity;");
instance = env->CallStaticObjectMethod(koreActivity, mid);
// Make sure that the garbage collector does not clean this up for us
instance = env->NewGlobalRef(instance);
}
#ifdef VR_CARDBOARD
void DistortionBefore() {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortionBeforeFrame", "()V");
env->CallObjectMethod(instance, mid);
}
void DistortionAfter() {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortionAfterFrame", "()V");
env->CallObjectMethod(instance, mid);
}
void DistortTexture(kha::Image_obj* image) {
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jmethodID mid = env->GetMethodID(koreActivity, "DistortTexture", "(I)V");
env->CallVoidMethod(instance, mid, image->renderTarget->_texture);
}
void updateGaze(float x, float y, float z, float w) {
qx = x;
qy = y;
qz = z;
qw = w;
}
template<typename T> T* CreateEmpty() {
return dynamic_cast<T*>(T::__CreateEmpty().mPtr);
}
kha::math::Quaternion_obj* getGaze() {
kha::math::Quaternion_obj* result = CreateEmpty<kha::math::Quaternion_obj>();
result->__construct(qx, qy, qz, qw);
return result;
}
#endif
#ifdef VR_GEAR_VR
void Initialize() {
ovrModeParms parms;
parms.AsynchronousTimeWarp = true;
parms.AllowPowerSave = true;
parms.DistortionFileName = 0;
parms.EnableImageServer = false;
parms.SkipWindowFullscreenReset = true;
// Grab the activity object
JNIEnv* env;
cachedJVM->AttachCurrentThread(&env, 0);
jclass koreActivity = env->FindClass("tech/kode/kore/KoreActivity");
jmethodID mid = env->GetStaticMethodID(koreActivity, "getInstance", "()Ltech/kode/kore/KoreActivity;");
jobject instance = env->CallStaticObjectMethod(koreActivity, mid);
// Make sure that the garbage collector does not clean this up for us
instance = env->NewGlobalRef(instance);
parms.ActivityObject = instance;
parms.GameThreadTid = 0;
parms.CpuLevel = 2;
parms.GpuLevel = 2;
ovrHmdInfo returnedHmdInfo;
ovr = ovr_EnterVrMode(parms, &returnedHmdInfo);
}
void WarpSwapBlack() {
// TODO: Not in the API anymore :-(
//ovr_WarpSwapBlack(ovr);
}
void WarpSwapLoadingIcon() {
//ovr_WarpSwapLoadingIcon(ovr);
}
template<typename T> T* CreateEmpty() {
return dynamic_cast<T*>(T::__CreateEmpty().mPtr);
}
kha::math::Quaternion_obj* GetQuaternion(const ovrQuatf& q) {
kha::math::Quaternion_obj* quaternion = CreateEmpty<kha::math::Quaternion_obj>();
quaternion->__construct(0.0f, 0.0f, 0.0f, 0.0f);
quaternion->set_x(q.x);
quaternion->set_y(q.y);
quaternion->set_z(q.z);
quaternion->set_w(q.w);
return quaternion;
}
ovrQuatf GetQuaternion(kha::math::Quaternion_obj* quat) {
ovrQuatf result;
result.x = quat->get_x();
result.y = quat->get_y();
result.z = quat->get_z();
result.w = quat->get_w();
return result;
}
ovrMatrix4f GetMatrix(kha::math::Matrix4_obj* mat) {
ovrMatrix4f result;
for (int x = 0; x < 4; x++) {
for (int y = 0; y < 4; y++) {
float f = mat->get(x, y);
result.M[x][y] = f;
}
}
return result;
}
kha::math::Vector3_obj* GetVector3(const ovrVector3f& v) {
kha::math::Vector3_obj* vector = CreateEmpty<kha::math::Vector3_obj>();
vector->x = v.x;
vector->y = v.y;
vector->z = v.z;
return vector;
}
ovrVector3f GetVector3(kha::math::Vector3_obj* v) {
ovrVector3f result;
result.x = v->x;
result.y = v->y;
result.z = v->z;
return result;
}
kha::vr::Pose_obj* GetPose(const ovrPosef& nativePose) {
kha::vr::Pose_obj* pose = CreateEmpty<kha::vr::Pose_obj>();
pose->Position = GetVector3(nativePose.Position);
pose->Orientation = GetQuaternion(nativePose.Orientation);
return pose;
}
kha::vr::PoseState_obj* GetPoseState(const ovrPoseStatef& nativeState) {
kha::vr::PoseState_obj* poseState = CreateEmpty<kha::vr::PoseState_obj>();
poseState->TimeInSeconds = nativeState.TimeInSeconds;
poseState->AngularAcceleration = GetVector3(nativeState.AngularAcceleration);
poseState->AngularVelocity = GetVector3(nativeState.AngularVelocity);
poseState->LinearAcceleration = GetVector3(nativeState.LinearAcceleration);
poseState->LinearVelocity = GetVector3(nativeState.LinearVelocity);
poseState->Pose = GetPose(nativeState.Pose);
return poseState;
}
kha::vr::SensorState_obj* GetPredictedSensorState(const float time) {
kha::vr::SensorState_obj* state = dynamic_cast<kha::vr::SensorState_obj*>(kha::vr::SensorState_obj::__CreateEmpty().mPtr);
ovrSensorState nativeState = ovr_GetPredictedSensorState(ovr, time);
state->Temperature = nativeState.Temperature;
state->Status = nativeState.Status;
state->Predicted = GetPoseState(nativeState.Predicted);
state->Recorded = GetPoseState(nativeState.Recorded);
return state;
}
kha::vr::SensorState_obj* GetSensorState() {
// 0.0 gets the last reading
return GetPredictedSensorState(0.0f);
}
ovrPosef GetPose(kha::vr::Pose_obj* pose) {
ovrPosef result;
result.Orientation = GetQuaternion(pose->Orientation.mPtr);
result.Position = GetVector3(pose->Position.mPtr);
return result;
}
ovrPoseStatef GetPoseState(kha::vr::PoseState_obj* poseState) {
ovrPoseStatef result;
result.TimeInSeconds = poseState->TimeInSeconds;
result.AngularAcceleration = GetVector3(poseState->AngularAcceleration.mPtr);
result.AngularVelocity = GetVector3(poseState->AngularVelocity.mPtr);
result.LinearAcceleration = GetVector3(poseState->LinearAcceleration.mPtr);
result.LinearVelocity = GetVector3(poseState->LinearVelocity.mPtr);
result.Pose = GetPose(poseState->Pose.mPtr);
return result;
}
ovrTimeWarpImage GetTimeWarpImage(kha::vr::TimeWarpImage_obj* image) {
ovrTimeWarpImage result;
if (image == 0) {
result.TexId = 0;
return result;
}
if (image->Image->renderTarget != 0) {
result.TexId = image->Image->renderTarget->_texture;
} else {
result.TexId = image->Image->texture->texture;
}
result.Pose = GetPoseState(image->Pose.mPtr);
result.TexCoordsFromTanAngles = GetMatrix(image->TexCoordsFromTanAngles.mPtr);
// result.TexCoordsFromTanAngles = TanAngleMatrixFromProjection(&result.TexCoordsFromTanAngles);
result.TexCoordsFromTanAngles = TanAngleMatrixFromFov(90.0f);
return result;
}
bool AreDifferent(ovrMatrix4f& lhs, ovrMatrix4f& rhs) {
for (int x = 0; x < 4; x++) {
for (int y = 0; y < 4; y++) {
if (Kore::abs(lhs.M[x][y] - rhs.M[x][y]) > 0.1f) return true;
}
}
return false;
}
void WarpSwap(kha::vr::TimeWarpParms_obj* parms) {
ovrTimeWarpParms nativeParms = InitTimeWarpParms();
const double predictedTime = ovr_GetPredictedDisplayTime( ovr, 1, 1 );
const ovrSensorState state = ovr_GetPredictedSensorState( ovr, predictedTime );
ovrTimeWarpImage leftImage = GetTimeWarpImage(parms->LeftImage.mPtr);
ovrTimeWarpImage rightImage = GetTimeWarpImage(parms->RightImage.mPtr);
ovrTimeWarpImage leftOverlay = GetTimeWarpImage(parms->LeftOverlay.mPtr);
ovrTimeWarpImage rightOverlay = GetTimeWarpImage(parms->RightOverlay.mPtr);
leftImage.Pose = state.Predicted;
leftOverlay.TexId = 0;
rightOverlay.TexId = 0;
//nativeParms->WarpProgram = WP_SIMPLE;
nativeParms.Images[0][0] = leftImage;
nativeParms.Images[0][1] = leftOverlay;
nativeParms.Images[1][0] = rightImage;
nativeParms.Images[1][1] = rightOverlay;
// nativeParms->WarpProgram = WP_OVERLAY_PLANE;
/*ovrMatrix4f comparison = OVR::Matrix4f::Translation(1.0f, 2.0f, 3.0f);
if (AreDifferent(comparison, nativeParms->Images[0][0].TexCoordsFromTanAngles)) {
Kore::log(Kore::Info, "Matrices are different!");
} else {
Kore::log(Kore::Info, "Matrices are identical");
} */
//ovrTimeWarpParms testParms = InitTimeWarpParms( WARP_INIT_LOADING_ICON);
ovr_WarpSwap(ovr, &nativeParms);
// TODO: What about memory - who deletes What?
}
double GetTimeInSeconds() {
return ovr_GetTimeInSeconds();
}
#endif
}
//
}
#endif

View File

@@ -0,0 +1,55 @@
#pragma once
#ifdef ANDROID
#include <jni.h>
#endif
#include <kha/vr/SensorState.h>
#include <kha/vr/TimeWarpParms.h>
#include <kha/Image.h>
#include <kha/math/Quaternion.h>
namespace Kore {
namespace VrInterface {
#ifdef ANDROID
// Save the JVM. Must be called before Initialize().
// TODO: Can this be handled better?
void SetJVM(JavaVM *jvm);
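// A common place to call this is JNI_OnLoad; a minimal sketch, assuming the
// library is loaded from Java via System.loadLibrary:
//   jint JNI_OnLoad(JavaVM *jvm, void *reserved) {
//       Kore::VrInterface::SetJVM(jvm);
//       return JNI_VERSION_1_6;
//   }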
#endif
#ifdef VR_CARDBOARD
void DistortionBefore();
void DistortionAfter();
void DistortTexture(kha::Image_obj *image);
void updateGaze(float x, float y, float z, float w);
kha::math::Quaternion_obj *getGaze();
#endif
#ifdef VR_GEAR_VR
// Calls ovr_enterVrMode
void Initialize();
void WarpSwapBlack();
void WarpSwapLoadingIcon();
kha::vr::SensorState_obj *GetSensorState();
kha::vr::SensorState_obj *GetPredictedSensorState(float time);
double GetTimeInSeconds();
void WarpSwap(kha::vr::TimeWarpParms_obj *parms);
#endif
}
}

View File

@@ -0,0 +1,5 @@
#include "audio.c.h"
#include "display.c.h"
#include "system.c.h"
#include "window.c.h"
#include "video.c.h"

View File

@@ -0,0 +1,133 @@
#include <kinc/audio2/audio.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <stdlib.h>
#include <string.h>
static kinc_a2_buffer_t a2_buffer;
static SLObjectItf engineObject;
static SLEngineItf engineEngine;
static SLObjectItf outputMixObject;
static SLObjectItf bqPlayerObject;
static SLPlayItf bqPlayerPlay = NULL;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
#define AUDIO_BUFFER_SIZE 1 * 1024
static int16_t tempBuffer[AUDIO_BUFFER_SIZE];
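// AUDIO_BUFFER_SIZE is counted in int16_t samples: 1024 samples = 512 stereo
// frames = 2048 bytes. Hence the callback below asks the mixer for
// AUDIO_BUFFER_SIZE / 2 frames, and Enqueue() is passed AUDIO_BUFFER_SIZE * 2 bytes.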
static void copySample(void *buffer) {
float left_value = a2_buffer.channels[0][a2_buffer.read_location];
float right_value = a2_buffer.channels[1][a2_buffer.read_location];
a2_buffer.read_location += 1;
if (a2_buffer.read_location >= a2_buffer.data_size) {
a2_buffer.read_location = 0;
}
((int16_t *)buffer)[0] = (int16_t)(left_value * 32767);
((int16_t *)buffer)[1] = (int16_t)(right_value * 32767);
}
static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf caller, void *context) {
if (kinc_a2_internal_callback(&a2_buffer, AUDIO_BUFFER_SIZE / 2)) {
for (int i = 0; i < AUDIO_BUFFER_SIZE; i += 2) {
copySample(&tempBuffer[i]);
}
}
else {
memset(tempBuffer, 0, sizeof(tempBuffer));
}
SLresult result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, tempBuffer, AUDIO_BUFFER_SIZE * 2);
}
static bool initialized = false;
void kinc_a2_init() {
if (initialized) {
return;
}
kinc_a2_internal_init();
initialized = true;
a2_buffer.read_location = 0;
a2_buffer.write_location = 0;
a2_buffer.data_size = 128 * 1024;
a2_buffer.channel_count = 2;
a2_buffer.channels[0] = (float*)malloc(a2_buffer.data_size * sizeof(float));
a2_buffer.channels[1] = (float*)malloc(a2_buffer.data_size * sizeof(float));
SLresult result;
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
const SLInterfaceID ids[] = {SL_IID_VOLUME};
const SLboolean req[] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 2,
SL_SAMPLINGRATE_44_1, SL_PCMSAMPLEFORMAT_FIXED_16,
SL_PCMSAMPLEFORMAT_FIXED_16, SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
SL_BYTEORDER_LITTLEENDIAN};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
const SLInterfaceID ids1[] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE};
const SLboolean req1[] = {SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &(bqPlayerObject), &audioSrc, &audioSnk, 1, ids1, req1);
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &(bqPlayerPlay));
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE, &(bqPlayerBufferQueue));
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
memset(tempBuffer, 0, sizeof(tempBuffer));
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, tempBuffer, AUDIO_BUFFER_SIZE * 2);
}
void pauseAudio() {
if (bqPlayerPlay == NULL) {
return;
}
SLresult result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PAUSED);
}
void resumeAudio() {
if (bqPlayerPlay == NULL) {
return;
}
SLresult result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
}
void kinc_a2_update() {}
void kinc_a2_shutdown() {
if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
}
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
}
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
}
uint32_t kinc_a2_samples_per_second(void) {
return 44100;
}

View File

@@ -0,0 +1,106 @@
#include <kinc/backend/Android.h>
#include <kinc/display.h>
#include <kinc/log.h>
typedef struct {
bool available;
int x;
int y;
int width;
int height;
bool primary;
int number;
} kinc_display_t;
static kinc_display_t display;
int kinc_count_displays(void) {
return 1;
}
int kinc_primary_display(void) {
return 0;
}
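// Each helper below follows the same JNI pattern: attach the calling thread
// to the JVM, call one of the static methods on tech.kinc.KincActivity (see
// KincActivity.kt above), and detach again.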
static int width() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID koreActivityGetDisplayWidth = (*env)->GetStaticMethodID(env, koreActivityClass, "getDisplayWidth", "()I");
int width = (*env)->CallStaticIntMethod(env, koreActivityClass, koreActivityGetDisplayWidth);
(*vm)->DetachCurrentThread(vm);
return width;
}
static int height() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID koreActivityGetDisplayHeight = (*env)->GetStaticMethodID(env, koreActivityClass, "getDisplayHeight", "()I");
int height = (*env)->CallStaticIntMethod(env, koreActivityClass, koreActivityGetDisplayHeight);
(*vm)->DetachCurrentThread(vm);
return height;
}
static int pixelsPerInch() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID koreActivityGetScreenDpi = (*env)->GetStaticMethodID(env, koreActivityClass, "getScreenDpi", "()I");
int dpi = (*env)->CallStaticIntMethod(env, koreActivityClass, koreActivityGetScreenDpi);
(*vm)->DetachCurrentThread(vm);
return dpi;
}
static int refreshRate() {
JNIEnv *env;
JavaVM *vm = kinc_android_get_activity()->vm;
(*vm)->AttachCurrentThread(vm, &env, NULL);
jclass koreActivityClass = kinc_android_find_class(env, "tech.kinc.KincActivity");
jmethodID koreActivityGetRefreshRate = (*env)->GetStaticMethodID(env, koreActivityClass, "getRefreshRate", "()I");
int rate = (*env)->CallStaticIntMethod(env, koreActivityClass, koreActivityGetRefreshRate);
(*vm)->DetachCurrentThread(vm);
return rate;
}
void kinc_display_init() {}
kinc_display_mode_t kinc_display_available_mode(int display_index, int mode_index) {
kinc_display_mode_t mode;
mode.x = 0;
mode.y = 0;
mode.width = width();
mode.height = height();
mode.frequency = refreshRate();
mode.bits_per_pixel = 32;
mode.pixels_per_inch = pixelsPerInch();
return mode;
}
int kinc_display_count_available_modes(int display_index) {
return 1;
}
kinc_display_mode_t kinc_display_current_mode(int display) {
kinc_display_mode_t mode;
mode.x = 0;
mode.y = 0;
mode.width = width();
mode.height = height();
mode.frequency = refreshRate();
mode.bits_per_pixel = 32;
mode.pixels_per_inch = pixelsPerInch();
return mode;
}
const char *kinc_display_name(int display) {
return "Display";
}
bool kinc_display_available(int display) {
return display == 0;
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,581 @@
#include <kinc/video.h>
#include <kinc/audio1/audio.h>
#include <kinc/graphics4/texture.h>
#include <kinc/io/filereader.h>
#include <kinc/log.h>
#include <kinc/system.h>
#include <android_native_app_glue.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>
#endif
#include <assert.h>
#include <jni.h>
#include <kinc/backend/Android.h>
#include <pthread.h>
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <android/native_window_jni.h>
#endif
void kinc_video_sound_stream_impl_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency) {
stream->bufferSize = 1;
stream->bufferReadPosition = 0;
stream->bufferWritePosition = 0;
stream->read = 0;
stream->written = 0;
}
void kinc_video_sound_stream_impl_destroy(kinc_internal_video_sound_stream_t *stream) {}
void kinc_video_sound_stream_impl_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count) {}
static float samples[2] = {0};
float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream) {
return samples;
}
bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream) {
return false;
}
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
#define videosCount 10
static kinc_video_t *videos[videosCount] = {NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL};
#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf
#define NB_BUFFERS 8
#define MPEG2_TS_PACKET_SIZE 188
#define PACKETS_PER_BUFFER 10
#define BUFFER_SIZE (PACKETS_PER_BUFFER * MPEG2_TS_PACKET_SIZE)
static const int kEosBufferCntxt = 1980; // a magic value we can compare against
typedef struct kinc_android_video {
XAObjectItf engineObject;
XAEngineItf engineEngine;
XAObjectItf outputMixObject;
const char *path;
AAsset *file;
XAObjectItf playerObj;
XAPlayItf playerPlayItf;
XAAndroidBufferQueueItf playerBQItf;
XAStreamInformationItf playerStreamInfoItf;
XAVolumeItf playerVolItf;
char dataCache[BUFFER_SIZE * NB_BUFFERS];
ANativeWindow *theNativeWindow;
jboolean reachedEof;
pthread_mutex_t mutex;
pthread_cond_t cond;
bool discontinuity;
} kinc_android_video_t;
void kinc_android_video_init(kinc_android_video_t *video) {
video->engineObject = NULL;
video->engineEngine = NULL;
video->outputMixObject = NULL;
video->file = NULL;
video->playerObj = NULL;
video->playerPlayItf = NULL;
video->playerBQItf = NULL;
video->playerStreamInfoItf = NULL;
video->playerVolItf = NULL;
video->theNativeWindow = NULL;
video->reachedEof = JNI_FALSE;
memset(&video->mutex, 0, sizeof(video->mutex)); // mutex = PTHREAD_MUTEX_INITIALIZER; // simple assign stopped working in Android Studio 2.2
memset(&video->cond, 0, sizeof(video->cond)); // cond = PTHREAD_COND_INITIALIZER; // simple assign stopped working in Android Studio 2.2
video->discontinuity = false;
}
bool kinc_android_video_enqueue_initial_buffers(kinc_android_video_t *video, bool discontinuity) {
// Fill our cache.
// We want to read whole packets (integral multiples of MPEG2_TS_PACKET_SIZE).
// fread returns units of "elements" not bytes, so we ask for 1-byte elements
// and then check that the number of elements is a multiple of the packet size.
//
// AAsset_read returns an int: the number of bytes read, 0 at EOF, negative on error.
// bytesRead = fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file);
int bytesRead = AAsset_read(video->file, video->dataCache, BUFFER_SIZE * NB_BUFFERS);
if (bytesRead <= 0) {
// could be premature EOF or I/O error
return false;
}
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
kinc_log(KINC_LOG_LEVEL_INFO, "Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
kinc_log(KINC_LOG_LEVEL_INFO, "Initially queueing %zu packets", packetsRead);
// Enqueue the content of our cache before starting to play,
// we don't want to starve the player
size_t i;
for (i = 0; i < NB_BUFFERS && packetsRead > 0; i++) {
// compute size of this buffer
size_t packetsThisBuffer = packetsRead;
if (packetsThisBuffer > PACKETS_PER_BUFFER) {
packetsThisBuffer = PACKETS_PER_BUFFER;
}
size_t bufferSize = packetsThisBuffer * MPEG2_TS_PACKET_SIZE;
XAresult res;
if (discontinuity) {
// signal discontinuity
XAAndroidBufferItem items[1];
items[0].itemKey = XA_ANDROID_ITEMKEY_DISCONTINUITY;
items[0].itemSize = 0;
// DISCONTINUITY message has no parameters,
// so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
res = (*video->playerBQItf)
->Enqueue(video->playerBQItf, NULL /*pBufferContext*/, video->dataCache + i * BUFFER_SIZE, bufferSize, items /*pMsg*/,
sizeof(XAuint32) * 2 /*msgLength*/);
discontinuity = JNI_FALSE;
}
else {
res = (*video->playerBQItf)->Enqueue(video->playerBQItf, NULL /*pBufferContext*/, video->dataCache + i * BUFFER_SIZE, bufferSize, NULL, 0);
}
assert(XA_RESULT_SUCCESS == res);
packetsRead -= packetsThisBuffer;
}
return true;
}
static XAresult AndroidBufferQueueCallback(XAAndroidBufferQueueItf caller, void *pCallbackContext, /* input */
void *pBufferContext, /* input */
void *pBufferData, /* input */
XAuint32 dataSize, /* input */
XAuint32 dataUsed, /* input */
const XAAndroidBufferItem *pItems, /* input */
XAuint32 itemsLength /* input */) {
kinc_android_video_t *self = (kinc_android_video_t *)pCallbackContext;
XAresult res;
int ok;
// pCallbackContext was specified as NULL at RegisterCallback and is unused here
// assert(NULL == pCallbackContext);
// note there is never any contention on this mutex unless a discontinuity request is active
ok = pthread_mutex_lock(&self->mutex);
assert(0 == ok);
// was a discontinuity requested?
if (self->discontinuity) {
// Note: can't rewind after EOS, which we send when reaching EOF
// (don't send EOS if you plan to play more content through the same player)
if (!self->reachedEof) {
// clear the buffer queue
res = (*self->playerBQItf)->Clear(self->playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// rewind the data source so we are guaranteed to be at an appropriate point
// rewind(file);
AAsset_seek(self->file, 0, SEEK_SET);
// Enqueue the initial buffers, with a discontinuity indicator on first buffer
kinc_android_video_enqueue_initial_buffers(self, JNI_TRUE);
}
// acknowledge the discontinuity request
self->discontinuity = JNI_FALSE;
ok = pthread_cond_signal(&self->cond);
assert(0 == ok);
goto exit;
}
if ((pBufferData == NULL) && (pBufferContext != NULL)) {
const int processedCommand = *(int *)pBufferContext;
if (kEosBufferCntxt == processedCommand) {
kinc_log(KINC_LOG_LEVEL_INFO, "EOS was processed");
// our buffer with the EOS message has been consumed
assert(0 == dataSize);
goto exit;
}
}
// pBufferData is a pointer to a buffer that we previously Enqueued
assert((dataSize > 0) && ((dataSize % MPEG2_TS_PACKET_SIZE) == 0));
assert(self->dataCache <= (char *)pBufferData && (char *)pBufferData < &self->dataCache[BUFFER_SIZE * NB_BUFFERS]);
assert(0 == (((char *)pBufferData - self->dataCache) % BUFFER_SIZE));
// don't bother trying to read more data once we've hit EOF
if (self->reachedEof) {
goto exit;
}
// note we do read from this file from multiple threads, but never concurrently
// bytesRead = fread(pBufferData, 1, BUFFER_SIZE, file);
int bytesRead = AAsset_read(self->file, pBufferData, BUFFER_SIZE);
if (bytesRead > 0) {
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
kinc_log(KINC_LOG_LEVEL_INFO, "Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
size_t bufferSize = packetsRead * MPEG2_TS_PACKET_SIZE;
res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/, pBufferData /*pData*/, bufferSize /*dataLength*/, NULL /*pMsg*/, 0 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
}
else {
// EOF or I/O error, signal EOS
XAAndroidBufferItem msgEos[1];
msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
msgEos[0].itemSize = 0;
// EOS message has no parameters, so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/, NULL /*pData*/, 0 /*dataLength*/, msgEos /*pMsg*/,
sizeof(XAuint32) * 2 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
self->reachedEof = JNI_TRUE;
}
exit:
ok = pthread_mutex_unlock(&self->mutex);
assert(0 == ok);
return XA_RESULT_SUCCESS;
}
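/* Hedged sketch, not part of Kinc's API: this is roughly how the NativeMedia
   sample that this callback mirrors requests a rewind from another thread.
   The flag is raised under the shared mutex and the requester waits for the
   callback above to clear it after Clear() plus re-enqueueing the initial
   buffers; the reachedEof check matters because no further buffer-processed
   callbacks arrive once EOS has been queued. */
static void kinc_android_video_request_discontinuity(kinc_android_video_t *video) {
	int ok = pthread_mutex_lock(&video->mutex);
	assert(0 == ok);
	video->discontinuity = JNI_TRUE;
	while (video->discontinuity && !video->reachedEof) {
		ok = pthread_cond_wait(&video->cond, &video->mutex);
		assert(0 == ok);
	}
	ok = pthread_mutex_unlock(&video->mutex);
	assert(0 == ok);
}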
static void StreamChangeCallback(XAStreamInformationItf caller, XAuint32 eventId, XAuint32 streamIndex, void *pEventData, void *pContext) {
kinc_log(KINC_LOG_LEVEL_INFO, "StreamChangeCallback called for stream %u", streamIndex);
// pContext carries the kinc_android_video_t passed to RegisterStreamChangeCallback; this callback only logs, so it is not needed here
(void)pContext;
switch (eventId) {
case XA_STREAMCBEVENT_PROPERTYCHANGE: {
// From spec 1.0.1:
// "This event indicates that stream property change has occurred.
// The streamIndex parameter identifies the stream with the property change.
// The pEventData parameter for this event is not used and shall be ignored."
//
XAresult res;
XAuint32 domain;
res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
assert(XA_RESULT_SUCCESS == res);
switch (domain) {
case XA_DOMAINTYPE_VIDEO: {
XAVideoStreamInformation videoInfo;
res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
assert(XA_RESULT_SUCCESS == res);
kinc_log(KINC_LOG_LEVEL_INFO, "Found video size %u x %u, codec ID=%u, frameRate=%u, bitRate=%u, duration=%u ms", videoInfo.width, videoInfo.height,
videoInfo.codecId, videoInfo.frameRate, videoInfo.bitRate, videoInfo.duration);
} break;
default:
kinc_log(KINC_LOG_LEVEL_ERROR, "Unexpected domain %u\n", domain);
break;
}
} break;
default:
kinc_log(KINC_LOG_LEVEL_ERROR, "Unexpected stream event ID %u\n", eventId);
break;
}
}
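/* Hedged sketch (illustrative only; nothing in this file calls it): the same
   stream information interface can also be polled instead of waiting for a
   PROPERTYCHANGE event, e.g. to enumerate the container's streams once the
   player has been realized. */
static void kinc_android_video_log_container_info(XAStreamInformationItf streamInfoItf) {
	XAMediaContainerInformation containerInfo;
	XAresult res = (*streamInfoItf)->QueryMediaContainerInformation(streamInfoItf, &containerInfo);
	assert(XA_RESULT_SUCCESS == res);
	kinc_log(KINC_LOG_LEVEL_INFO, "Container type %u, duration %u ms, %u streams", containerInfo.containerType, containerInfo.mediaDuration,
	         containerInfo.numStreams);
}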
bool kinc_android_video_open(kinc_android_video_t *video, const char *filename) {
XAresult res;
// create engine
res = xaCreateEngine(&video->engineObject, 0, NULL, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the engine
res = (*video->engineObject)->Realize(video->engineObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the engine interface, which is needed in order to create other objects
res = (*video->engineObject)->GetInterface(video->engineObject, XA_IID_ENGINE, &video->engineEngine);
assert(XA_RESULT_SUCCESS == res);
// create output mix
res = (*video->engineEngine)->CreateOutputMix(video->engineEngine, &video->outputMixObject, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the output mix
res = (*video->outputMixObject)->Realize(video->outputMixObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// open the file to play
video->file = AAssetManager_open(kinc_android_get_asset_manager(), filename, AASSET_MODE_STREAMING);
if (video->file == NULL) {
kinc_log(KINC_LOG_LEVEL_INFO, "Could not find video file.");
return false;
}
// configure data source
XADataLocator_AndroidBufferQueue loc_abq = {XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS};
XADataFormat_MIME format_mime = {XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS};
XADataSource dataSrc = {&loc_abq, &format_mime};
// configure audio sink
XADataLocator_OutputMix loc_outmix = {XA_DATALOCATOR_OUTPUTMIX, video->outputMixObject};
XADataSink audioSnk = {&loc_outmix, NULL};
// configure image video sink
XADataLocator_NativeDisplay loc_nd = {
XA_DATALOCATOR_NATIVEDISPLAY, // locatorType
// the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
(void *)video->theNativeWindow, // hWindow
// must be NULL
NULL // hDisplay
};
XADataSink imageVideoSink = {&loc_nd, NULL};
// declare interfaces to use
XAboolean required[NB_MAXAL_INTERFACES] = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
XAInterfaceID iidArray[NB_MAXAL_INTERFACES] = {XA_IID_PLAY, XA_IID_ANDROIDBUFFERQUEUESOURCE, XA_IID_STREAMINFORMATION};
// create media player
res = (*video->engineEngine)
->CreateMediaPlayer(video->engineEngine, &video->playerObj, &dataSrc, NULL, &audioSnk, &imageVideoSink, NULL, NULL,
NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/, iidArray /*const XAInterfaceID *pInterfaceIds*/,
required /*const XAboolean *pInterfaceRequired*/);
assert(XA_RESULT_SUCCESS == res);
// realize the player
res = (*video->playerObj)->Realize(video->playerObj, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the play interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_PLAY, &video->playerPlayItf);
assert(XA_RESULT_SUCCESS == res);
// get the stream information interface (for video size)
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_STREAMINFORMATION, &video->playerStreamInfoItf);
assert(XA_RESULT_SUCCESS == res);
// get the volume interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_VOLUME, &video->playerVolItf);
assert(XA_RESULT_SUCCESS == res);
// get the Android buffer queue interface
res = (*video->playerObj)->GetInterface(video->playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &video->playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// specify which events we want to be notified of
res = (*video->playerBQItf)->SetCallbackEventsMask(video->playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
assert(XA_RESULT_SUCCESS == res);
// register the callback from which OpenMAX AL can retrieve the data to play
res = (*video->playerBQItf)->RegisterCallback(video->playerBQItf, AndroidBufferQueueCallback, video);
assert(XA_RESULT_SUCCESS == res);
// we want to be notified of the video size once it's found, so we register a callback for that
res = (*video->playerStreamInfoItf)->RegisterStreamChangeCallback(video->playerStreamInfoItf, StreamChangeCallback, video);
assert(XA_RESULT_SUCCESS == res);
// enqueue the initial buffers
if (!kinc_android_video_enqueue_initial_buffers(video, false)) {
kinc_log(KINC_LOG_LEVEL_INFO, "Could not enqueue initial buffers for video decoding.");
return false;
}
// prepare the player
res = (*video->playerPlayItf)->SetPlayState(video->playerPlayItf, XA_PLAYSTATE_PAUSED);
assert(XA_RESULT_SUCCESS == res);
// set the volume
res = (*video->playerVolItf)->SetVolumeLevel(video->playerVolItf, 0);
assert(XA_RESULT_SUCCESS == res);
// start the playback
res = (*video->playerPlayItf)->SetPlayState(video->playerPlayItf, XA_PLAYSTATE_PLAYING);
assert(XA_RESULT_SUCCESS == res);
kinc_log(KINC_LOG_LEVEL_INFO, "Successfully loaded video.");
return true;
}
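/* Hedged usage sketch (illustrative; the asset name and the surface handling
   are assumptions, not part of this file): the function above builds the
   entire OpenMAX AL chain and immediately starts playback, so a caller only
   needs roughly this:

       kinc_android_video_t video;
       kinc_android_video_init(&video);
       video.theNativeWindow = ANativeWindow_fromSurface(env, surface);
       if (kinc_android_video_open(&video, "video.ts")) {
           // decoding and playback are now running
           // ...
           kinc_android_video_shutdown(&video);
       }
*/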
void kinc_android_video_shutdown(kinc_android_video_t *video) {
// destroy streaming media player object, and invalidate all associated interfaces
if (video->playerObj != NULL) {
(*video->playerObj)->Destroy(video->playerObj);
video->playerObj = NULL;
video->playerPlayItf = NULL;
video->playerBQItf = NULL;
video->playerStreamInfoItf = NULL;
video->playerVolItf = NULL;
}
// destroy output mix object, and invalidate all associated interfaces
if (video->outputMixObject != NULL) {
(*video->outputMixObject)->Destroy(video->outputMixObject);
video->outputMixObject = NULL;
}
// destroy engine object, and invalidate all associated interfaces
if (video->engineObject != NULL) {
(*video->engineObject)->Destroy(video->engineObject);
video->engineObject = NULL;
video->engineEngine = NULL;
}
// close the file
if (video->file != NULL) {
AAsset_close(video->file);
video->file = NULL;
}
// make sure we don't leak native windows
if (video->theNativeWindow != NULL) {
ANativeWindow_release(video->theNativeWindow);
video->theNativeWindow = NULL;
}
}
#endif
JNIEXPORT void JNICALL Java_tech_kinc_KincMoviePlayer_nativeCreate(JNIEnv *env, jobject jobj, jstring jpath, jobject surface, jint id) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
const char *path = (*env)->GetStringUTFChars(env, jpath, NULL);
kinc_android_video_t *av = malloc(sizeof *av);
kinc_android_video_init(av);
av->theNativeWindow = ANativeWindow_fromSurface(env, surface);
kinc_android_video_open(av, path);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] != NULL && videos[i]->impl.id == id) {
videos[i]->impl.androidVideo = av;
break;
}
}
(*env)->ReleaseStringUTFChars(env, jpath, path);
#endif
}
void KoreAndroidVideoInit() {
JNIEnv *env;
(*kinc_android_get_activity()->vm)->AttachCurrentThread(kinc_android_get_activity()->vm, &env, NULL);
jclass clazz = kinc_android_find_class(env, "tech.kinc.KincMoviePlayer");
// String path, Surface surface, int id
JNINativeMethod methodTable[] = {{"nativeCreate", "(Ljava/lang/String;Landroid/view/Surface;I)V", (void *)Java_tech_kinc_KincMoviePlayer_nativeCreate}};
int methodTableSize = sizeof(methodTable) / sizeof(methodTable[0]);
int failure = (*env)->RegisterNatives(env, clazz, methodTable, methodTableSize);
if (failure != 0) {
kinc_log(KINC_LOG_LEVEL_WARNING, "Failed to register KincMoviePlayer.nativeCreate");
}
(*kinc_android_get_activity()->vm)->DetachCurrentThread(kinc_android_get_activity()->vm);
}
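/* Note (hedged): registering nativeCreate explicitly via RegisterNatives
   avoids relying on JNI's dynamic symbol lookup. Per the
   "(Ljava/lang/String;Landroid/view/Surface;I)V" signature above and the
   jobject receiver, the matching Java-side declaration would look like this
   (illustrative):

       // in tech.kinc.KincMoviePlayer
       private native void nativeCreate(String path, Surface surface, int id);
*/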
void kinc_video_init(kinc_video_t *video, const char *filename) {
video->impl.playing = false;
video->impl.sound = NULL;
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_log(KINC_LOG_LEVEL_INFO, "Opening video %s.", filename);
// placeholder dimensions; the actual video size is only reported (and currently just logged) by StreamChangeCallback
video->impl.myWidth = 1023;
video->impl.myHeight = 684;
video->impl.next = 0;
video->impl.audioTime = 0;
JNIEnv *env = NULL;
(*kinc_android_get_activity()->vm)->AttachCurrentThread(kinc_android_get_activity()->vm, &env, NULL);
jclass koreMoviePlayerClass = kinc_android_find_class(env, "tech.kinc.KincMoviePlayer");
jmethodID constructor = (*env)->GetMethodID(env, koreMoviePlayerClass, "<init>", "(Ljava/lang/String;)V");
jobject object = (*env)->NewObject(env, koreMoviePlayerClass, constructor, (*env)->NewStringUTF(env, filename));
jmethodID getId = (*env)->GetMethodID(env, koreMoviePlayerClass, "getId", "()I");
video->impl.id = (*env)->CallIntMethod(env, object, getId);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] == NULL) {
videos[i] = video;
break;
}
}
jmethodID jinit = (*env)->GetMethodID(env, koreMoviePlayerClass, "init", "()V");
(*env)->CallVoidMethod(env, object, jinit);
jmethodID getTextureId = (*env)->GetMethodID(env, koreMoviePlayerClass, "getTextureId", "()I");
int texid = (*env)->CallIntMethod(env, object, getTextureId);
(*kinc_android_get_activity()->vm)->DetachCurrentThread(kinc_android_get_activity()->vm);
kinc_g4_texture_init_from_id(&video->impl.image, texid);
#endif
}
void kinc_video_destroy(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_video_stop(video);
kinc_android_video_t *av = (kinc_android_video_t *)video->impl.androidVideo;
kinc_android_video_shutdown(av);
for (int i = 0; i < videosCount; ++i) {
if (videos[i] == video) {
videos[i] = NULL;
break;
}
}
#endif
}
void kinc_video_play(kinc_video_t *video, bool loop) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
video->impl.playing = true;
video->impl.start = kinc_time();
#endif
}
void kinc_video_pause(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
video->impl.playing = false;
#endif
}
void kinc_video_stop(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
kinc_video_pause(video);
#endif
}
void kinc_video_update(kinc_video_t *video, double time) {}
int kinc_video_width(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return video->impl.myWidth;
#else
return 512;
#endif
}
int kinc_video_height(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return video->impl.myHeight;
#else
return 512;
#endif
}
kinc_g4_texture_t *kinc_video_current_image(kinc_video_t *video) {
#if KINC_ANDROID_API >= 15 && !defined(KINC_VULKAN)
return &video->impl.image;
#else
return NULL;
#endif
}
double kinc_video_duration(kinc_video_t *video) {
return 0.0;
}
double kinc_video_position(kinc_video_t *video) {
return 0.0;
}
bool kinc_video_finished(kinc_video_t *video) {
return false;
}
bool kinc_video_paused(kinc_video_t *video) {
return !video->impl.playing;
}

View File

@ -0,0 +1,49 @@
#pragma once
#include <kinc/graphics4/texture.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef struct {
void *assetReader;
void *videoTrackOutput;
void *audioTrackOutput;
double start;
double next;
// double audioTime;
unsigned long long audioTime;
bool playing;
void *sound;
void *androidVideo;
int id;
kinc_g4_texture_t image;
double lastTime;
int myWidth;
int myHeight;
} kinc_video_impl_t;
typedef struct kinc_internal_video_sound_stream {
void *audioTrackOutput;
float *buffer;
int bufferSize;
int bufferWritePosition;
int bufferReadPosition;
uint64_t read;
uint64_t written;
} kinc_internal_video_sound_stream_t;
void kinc_internal_video_sound_stream_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency);
void kinc_internal_video_sound_stream_destroy(kinc_internal_video_sound_stream_t *stream);
void kinc_internal_video_sound_stream_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count);
float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream);
bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream);
#ifdef __cplusplus
}
#endif
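/* Hedged sketch of the intended data flow (the real implementation lives in
   the backend sources; everything below is illustrative): the decoder thread
   appends interleaved float samples via
   kinc_internal_video_sound_stream_insert_data(), advancing
   bufferWritePosition modulo bufferSize together with the monotonic "written"
   counter, while the audio thread consumes via
   kinc_internal_video_sound_stream_next_frame(), advancing bufferReadPosition
   and "read" the same way. The producer-side wrap looks roughly like:

       stream->buffer[stream->bufferWritePosition++] = sample;
       if (stream->bufferWritePosition >= stream->bufferSize) {
           stream->bufferWritePosition = 0; // wrap the ring buffer
       }
       stream->written += 1;
*/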

View File

@ -0,0 +1,79 @@
#include <kinc/display.h>
#include <kinc/graphics4/graphics.h>
#include <kinc/window.h>
static void (*resizeCallback)(int width, int height, void *data) = NULL;
static void *resizeCallbackData = NULL;
int kinc_count_windows(void) {
return 1;
}
int kinc_window_x(int window_index) {
return 0;
}
int kinc_window_y(int window_index) {
return 0;
}
int kinc_android_width(void);
int kinc_window_width(int window_index) {
return kinc_android_width();
}
int kinc_android_height(void);
int kinc_window_height(int window_index) {
return kinc_android_height();
}
void kinc_window_resize(int window_index, int width, int height) {}
void kinc_window_move(int window_index, int x, int y) {}
void kinc_internal_change_framebuffer(int window, struct kinc_framebuffer_options *frame);
void kinc_window_change_framebuffer(int window_index, kinc_framebuffer_options_t *frame) {
kinc_internal_change_framebuffer(0, frame);
}
void kinc_window_change_features(int window_index, int features) {}
void kinc_window_change_mode(int window_index, kinc_window_mode_t mode) {}
void kinc_window_destroy(int window_index) {}
void kinc_window_show(int window_index) {}
void kinc_window_hide(int window_index) {}
void kinc_window_set_title(int window_index, const char *title) {}
int kinc_window_create(kinc_window_options_t *win, kinc_framebuffer_options_t *frame) {
return 0;
}
void kinc_window_set_resize_callback(int window_index, void (*callback)(int width, int height, void *data), void *data) {
resizeCallback = callback;
resizeCallbackData = data;
}
void kinc_internal_call_resize_callback(int window_index, int width, int height) {
if (resizeCallback != NULL) {
resizeCallback(width, height, resizeCallbackData);
}
}
void kinc_window_set_ppi_changed_callback(int window_index, void (*callback)(int ppi, void *data), void *data) {}
void kinc_window_set_close_callback(int window, bool (*callback)(void *), void *data) {}
kinc_window_mode_t kinc_window_get_mode(int window_index) {
return KINC_WINDOW_MODE_FULLSCREEN;
}
int kinc_window_display(int window) {
return 0;
}
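/* Hedged usage sketch (illustrative): Android exposes exactly one fullscreen
   window (index 0), so client code registers for surface-size changes like
   this, and the backend fires kinc_internal_call_resize_callback when the
   native window changes size:

       static void on_resize(int width, int height, void *data) {
           kinc_log(KINC_LOG_LEVEL_INFO, "resized to %i x %i", width, height);
       }

       kinc_window_set_resize_callback(0, on_resize, NULL);
*/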

View File

@ -0,0 +1 @@
#pragma once