Update Files
@@ -0,0 +1,4 @@
#include "http.m.h"
#include "system.m.h"
#include "thread.m.h"
#include "video.m.h"
Kha/Kinc/Backends/System/Apple/Sources/kinc/backend/http.m.h (new normal file, 53 lines)
@@ -0,0 +1,53 @@
#include <kinc/network/http.h>

#import <Foundation/Foundation.h>

void kinc_http_request(const char *url, const char *path, const char *data, int port, bool secure, int method, const char *header,
                       kinc_http_callback_t callback, void *callbackdata) {
    NSString *urlstring = secure ? @"https://" : @"http://";
    urlstring = [urlstring stringByAppendingString:[NSString stringWithUTF8String:url]];
    urlstring = [urlstring stringByAppendingString:@":"];
    urlstring = [urlstring stringByAppendingString:[[NSNumber numberWithInt:port] stringValue]];
    urlstring = [urlstring stringByAppendingString:@"/"];
    urlstring = [urlstring stringByAppendingString:[NSString stringWithUTF8String:path]];

    NSURL *aUrl = [NSURL URLWithString:urlstring];

    NSURLSessionConfiguration *sessionConfiguration = [NSURLSessionConfiguration defaultSessionConfiguration];
    sessionConfiguration.HTTPAdditionalHeaders = @{@"Content-Type" : @"application/json"};
    NSURLSession *session = [NSURLSession sessionWithConfiguration:sessionConfiguration];
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:aUrl];
    if (data != 0) {
        // printf("Sending %s\n\n", data);
        NSString *datastring = [NSString stringWithUTF8String:data];
        request.HTTPBody = [datastring dataUsingEncoding:NSUTF8StringEncoding];
    }

    switch (method) {
    case KINC_HTTP_GET:
        request.HTTPMethod = @"GET";
        break;
    case KINC_HTTP_POST:
        request.HTTPMethod = @"POST";
        break;
    case KINC_HTTP_PUT:
        request.HTTPMethod = @"PUT";
        break;
    case KINC_HTTP_DELETE:
        request.HTTPMethod = @"DELETE";
        break;
    }

    NSURLSessionDataTask *dataTask = [session dataTaskWithRequest:request
                                                completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
                                                    NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
                                                    int statusCode = (int)[httpResponse statusCode];

                                                    NSMutableData *responseData = [[NSMutableData alloc] init];
                                                    [responseData appendData:data];
                                                    [responseData appendBytes:"\0" length:1];

                                                    callback(error == nil ? 0 : 1, statusCode, (const char *)[responseData bytes], callbackdata);
                                                }];
    [dataTask resume];
}
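For context, a minimal usage sketch of the NSURLSession-backed request function above (not part of the commit): the callback's parameter order is inferred from the call in the completion handler, and the host, path, and port values are placeholders.

#include <kinc/network/http.h>
#include <stdio.h>

// Receives the result on completion: error flag (0 on success), HTTP status,
// zero-terminated response body, and the user pointer passed to the request.
static void on_response(int error, int response, const char *body, void *callbackdata) {
    if (error == 0) {
        printf("HTTP %d: %s\n", response, body);
    }
}

void request_example(void) {
    // Builds "https://example.com:443/index.html" internally; note that this
    // backend ignores the header argument and always sends application/json.
    kinc_http_request("example.com", "index.html", NULL, 443, true, KINC_HTTP_GET, NULL, on_response, NULL);
}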
@@ -0,0 +1,24 @@
#include <kinc/system.h>

int kinc_hardware_threads(void) {
    return (int)[[NSProcessInfo processInfo] processorCount];
}

#ifdef KINC_APPLE_SOC

int kinc_cpu_cores(void) {
    return kinc_hardware_threads();
}

#else

#include <sys/sysctl.h>

int kinc_cpu_cores(void) {
    uint32_t proper_cpu_count = 1;
    size_t count_length = sizeof(proper_cpu_count);
    sysctlbyname("hw.physicalcpu", &proper_cpu_count, &count_length, 0, 0);
    return (int)proper_cpu_count;
}

#endif
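A short sketch of how the two queries above are typically consumed, for illustration only; the pool-sizing policy is an assumption, not Kinc API.

#include <kinc/system.h>

// kinc_hardware_threads() reports logical processors via NSProcessInfo;
// kinc_cpu_cores() reports physical cores via hw.physicalcpu (or the same
// value on Apple SoCs, where the sysctl path is skipped).
int pick_worker_count(void) {
    int cores = kinc_cpu_cores();
    return cores > 1 ? cores - 1 : 1; // keep one core free for the main thread
}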
@@ -0,0 +1,50 @@
#include <stdio.h>
#include <string.h>

#include <Foundation/Foundation.h>

#include <kinc/threads/mutex.h>
#include <kinc/threads/thread.h>

#include <pthread.h>
#include <stdio.h>
#include <wchar.h>

static void *ThreadProc(void *arg) {
    @autoreleasepool {
        kinc_thread_t *t = (kinc_thread_t *)arg;
        t->impl.thread(t->impl.param);
        pthread_exit(NULL);
        return NULL;
    }
}

void kinc_thread_init(kinc_thread_t *t, void (*thread)(void *param), void *param) {
    t->impl.param = param;
    t->impl.thread = thread;
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    // pthread_attr_setstacksize(&attr, 1024 * 64);
    struct sched_param sp;
    memset(&sp, 0, sizeof(sp));
    sp.sched_priority = 0;
    pthread_attr_setschedparam(&attr, &sp);
    pthread_create(&t->impl.pthread, &attr, &ThreadProc, t);
    // Kt::affirmD(ret == 0);
    pthread_attr_destroy(&attr);
}

void kinc_thread_wait_and_destroy(kinc_thread_t *thread) {
    int ret;
    do {
        ret = pthread_join(thread->impl.pthread, NULL);
    } while (ret != 0);
}

bool kinc_thread_try_to_destroy(kinc_thread_t *thread) {
    return pthread_join(thread->impl.pthread, NULL) == 0;
}

void kinc_threads_init(void) {}

void kinc_threads_quit(void) {}
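An illustrative call pattern for the pthread-backed thread API above; the worker function and its counter are placeholders, not part of the commit.

#include <kinc/threads/thread.h>

static void worker(void *param) {
    int *counter = (int *)param;
    *counter += 1; // runs on the worker thread, inside ThreadProc's autorelease pool
}

void thread_example(void) {
    int counter = 0;
    kinc_thread_t thread;
    kinc_thread_init(&thread, worker, &counter); // spawns via pthread_create
    kinc_thread_wait_and_destroy(&thread);       // retries pthread_join until it succeeds
}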
Kha/Kinc/Backends/System/Apple/Sources/kinc/backend/video.h (new normal file, 56 lines)
@@ -0,0 +1,56 @@
#pragma once

#include <objc/runtime.h>

#include <kinc/graphics4/texture.h>

#ifdef __cplusplus
extern "C" {
#endif

typedef struct {
    double start;
    double videoStart;
    double next;
    // double audioTime;
    unsigned long long audioTime;
    bool playing;
    bool loop;
    void *sound;
    bool image_initialized;
    kinc_g4_texture_t image;
    double lastTime;
    float duration;
    bool finished;
    int myWidth;
    int myHeight;

    id videoAsset;
    id assetReader;
    id videoTrackOutput;
    id audioTrackOutput;
    id url;
} kinc_video_impl_t;

typedef struct kinc_internal_video_sound_stream {
    float *buffer;
    int bufferSize;
    int bufferWritePosition;
    int bufferReadPosition;
    uint64_t read;
    uint64_t written;
} kinc_internal_video_sound_stream_t;

void kinc_internal_video_sound_stream_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency);

void kinc_internal_video_sound_stream_destroy(kinc_internal_video_sound_stream_t *stream);

void kinc_internal_video_sound_stream_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count);

float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream);

bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream);

#ifdef __cplusplus
}
#endif
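Based on the declarations above and the implementation in video.m.h below, the sound stream is a ring buffer: the decoder pushes interleaved floats and the audio side pulls one stereo frame per call. The following mixing helper is illustrative only, and the include path is assumed from the file's location in the tree.

#include <kinc/backend/video.h>

static void mix_video_audio(kinc_internal_video_sound_stream_t *stream, float *out, int frames) {
    for (int i = 0; i < frames; ++i) {
        // next_frame returns a pointer to two samples (left, right), or NULL
        // once the reader catches up with the writer ("Out of audio").
        float *frame = kinc_internal_video_sound_stream_next_frame(stream);
        out[i * 2 + 0] = frame != NULL ? frame[0] : 0.0f;
        out[i * 2 + 1] = frame != NULL ? frame[1] : 0.0f;
    }
}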
Kha/Kinc/Backends/System/Apple/Sources/kinc/backend/video.m.h (new normal file, 311 lines)
@@ -0,0 +1,311 @@
#include <kinc/video.h>

#import <AVFoundation/AVFoundation.h>
#include <kinc/audio1/audio.h>
#include <kinc/graphics4/texture.h>
#include <kinc/io/filereader.h>
#include <kinc/log.h>
#include <kinc/system.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

extern const char *iphonegetresourcepath(void);
extern const char *macgetresourcepath(void);

void kinc_internal_video_sound_stream_init(kinc_internal_video_sound_stream_t *stream, int channel_count, int frequency) {
    stream->bufferSize = 1024 * 100;
    stream->bufferReadPosition = 0;
    stream->bufferWritePosition = 0;
    stream->read = 0;
    stream->written = 0;
    stream->buffer = (float *)malloc(stream->bufferSize * sizeof(float));
}

void kinc_internal_video_sound_stream_destroy(kinc_internal_video_sound_stream_t *stream) {
    free(stream->buffer);
}

void kinc_internal_video_sound_stream_insert_data(kinc_internal_video_sound_stream_t *stream, float *data, int sample_count) {
    for (int i = 0; i < sample_count; ++i) {
        float value = data[i]; // / 32767.0;
        stream->buffer[stream->bufferWritePosition++] = value;
        ++stream->written;
        if (stream->bufferWritePosition >= stream->bufferSize) {
            stream->bufferWritePosition = 0;
        }
    }
}

static float samples[2] = {0};

float *kinc_internal_video_sound_stream_next_frame(kinc_internal_video_sound_stream_t *stream) {
    ++stream->read;
    if (stream->written <= stream->read) {
        kinc_log(KINC_LOG_LEVEL_WARNING, "Out of audio\n");
        return 0;
    }

    if (stream->bufferReadPosition >= stream->bufferSize) {
        stream->bufferReadPosition = 0;
        kinc_log(KINC_LOG_LEVEL_INFO, "buffer read back - %i\n", (int)(stream->written - stream->read));
    }
    samples[0] = stream->buffer[stream->bufferReadPosition++];

    if (stream->bufferReadPosition >= stream->bufferSize) {
        stream->bufferReadPosition = 0;
        kinc_log(KINC_LOG_LEVEL_INFO, "buffer read back - %i\n", (int)(stream->written - stream->read));
    }
    samples[1] = stream->buffer[stream->bufferReadPosition++];

    return samples;
}

bool kinc_internal_video_sound_stream_ended(kinc_internal_video_sound_stream_t *stream) {
    return false;
}

static void load(kinc_video_t *video, double startTime) {
    video->impl.videoStart = startTime;
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:video->impl.url options:nil];
    video->impl.videoAsset = asset;

    video->impl.duration = [asset duration].value / [asset duration].timescale;

    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    NSDictionary *videoOutputSettings =
        [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetReaderTrackOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoOutputSettings];
    [videoOutput setSupportsRandomAccess:YES];

    bool hasAudio = [[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0;
    AVAssetReaderAudioMixOutput *audioOutput = NULL;
    if (hasAudio) {
        AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        NSDictionary *audioOutputSettings = [NSDictionary
            dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey, [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                         [NSNumber numberWithInt:32], AVLinearPCMBitDepthKey, [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                         [NSNumber numberWithBool:YES], AVLinearPCMIsFloatKey, [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey, nil];
        audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:@[ audioTrack ] audioSettings:audioOutputSettings];
        [audioOutput setSupportsRandomAccess:YES];
    }

    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:nil];

    if (startTime > 0) {
        CMTimeRange timeRange = CMTimeRangeMake(CMTimeMake(startTime * 1000, 1000), kCMTimePositiveInfinity);
        reader.timeRange = timeRange;
    }

    [reader addOutput:videoOutput];
    if (hasAudio) {
        [reader addOutput:audioOutput];
    }

    video->impl.assetReader = reader;
    video->impl.videoTrackOutput = videoOutput;
    if (hasAudio) {
        video->impl.audioTrackOutput = audioOutput;
    }
    else {
        video->impl.audioTrackOutput = NULL;
    }

    if (video->impl.myWidth < 0)
        video->impl.myWidth = [videoTrack naturalSize].width;
    if (video->impl.myHeight < 0)
        video->impl.myHeight = [videoTrack naturalSize].height;
    int framerate = [videoTrack nominalFrameRate];
    kinc_log(KINC_LOG_LEVEL_INFO, "Framerate: %i\n", framerate);
    video->impl.next = video->impl.videoStart;
    video->impl.audioTime = video->impl.videoStart * 44100;
}

void kinc_video_init(kinc_video_t *video, const char *filename) {
    video->impl.playing = false;
    video->impl.sound = NULL;
    video->impl.image_initialized = false;
    char name[2048];
#ifdef KINC_IOS
    strcpy(name, iphonegetresourcepath());
#else
    strcpy(name, macgetresourcepath());
#endif
    strcat(name, "/");
    strcat(name, KINC_DEBUGDIR);
    strcat(name, "/");
    strcat(name, filename);
    video->impl.url = [NSURL fileURLWithPath:[NSString stringWithUTF8String:name]];
    video->impl.myWidth = -1;
    video->impl.myHeight = -1;
    video->impl.finished = false;
    video->impl.duration = 0;
    load(video, 0);
}

void kinc_video_destroy(kinc_video_t *video) {
    kinc_video_stop(video);
}

#ifdef KINC_IOS
void iosPlayVideoSoundStream(kinc_internal_video_sound_stream_t *video);
void iosStopVideoSoundStream(void);
#else
void macPlayVideoSoundStream(kinc_internal_video_sound_stream_t *video);
void macStopVideoSoundStream(void);
#endif

void kinc_video_play(kinc_video_t *video, bool loop) {
    AVAssetReader *reader = video->impl.assetReader;
    [reader startReading];

    kinc_internal_video_sound_stream_t *stream = (kinc_internal_video_sound_stream_t *)malloc(sizeof(kinc_internal_video_sound_stream_t));
    kinc_internal_video_sound_stream_init(stream, 2, 44100);
    video->impl.sound = stream;
#ifdef KINC_IOS
    iosPlayVideoSoundStream((kinc_internal_video_sound_stream_t *)video->impl.sound);
#else
    macPlayVideoSoundStream((kinc_internal_video_sound_stream_t *)video->impl.sound);
#endif

    video->impl.playing = true;
    video->impl.start = kinc_time() - video->impl.videoStart;
    video->impl.loop = loop;
}

void kinc_video_pause(kinc_video_t *video) {
    video->impl.playing = false;
    if (video->impl.sound != NULL) {
// Mixer::stop(sound);
#ifdef KINC_IOS
        iosStopVideoSoundStream();
#else
        macStopVideoSoundStream();
#endif
        kinc_internal_video_sound_stream_destroy((kinc_internal_video_sound_stream_t *)video->impl.sound);
        free(video->impl.sound);
        video->impl.sound = NULL;
    }
}

void kinc_video_stop(kinc_video_t *video) {
    kinc_video_pause(video);
    video->impl.finished = true;
}

static void updateImage(kinc_video_t *video) {
    if (!video->impl.playing)
        return;

    {
        AVAssetReaderTrackOutput *videoOutput = video->impl.videoTrackOutput;
        CMSampleBufferRef buffer = [videoOutput copyNextSampleBuffer];
        if (!buffer) {
            if (video->impl.loop) {
                CMTimeRange timeRange = CMTimeRangeMake(CMTimeMake(0, 1000), kCMTimePositiveInfinity);
                [videoOutput resetForReadingTimeRanges:[NSArray arrayWithObject:[NSValue valueWithCMTimeRange:timeRange]]];

                AVAssetReaderAudioMixOutput *audioOutput = video->impl.audioTrackOutput;
                CMSampleBufferRef audio_buffer = [audioOutput copyNextSampleBuffer];
                while (audio_buffer) {
                    audio_buffer = [audioOutput copyNextSampleBuffer];
                }
                [audioOutput resetForReadingTimeRanges:[NSArray arrayWithObject:[NSValue valueWithCMTimeRange:timeRange]]];

                buffer = [videoOutput copyNextSampleBuffer];

                video->impl.start = kinc_time() - video->impl.videoStart;
            }
            else {
                kinc_video_stop(video);
                return;
            }
        }
        video->impl.next = CMTimeGetSeconds(CMSampleBufferGetOutputPresentationTimeStamp(buffer));

        CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);

        if (!video->impl.image_initialized) {
            CGSize size = CVImageBufferGetDisplaySize(pixelBuffer);
            video->impl.myWidth = size.width;
            video->impl.myHeight = size.height;
            kinc_g4_texture_init(&video->impl.image, kinc_video_width(video), kinc_video_height(video), KINC_IMAGE_FORMAT_BGRA32);
            video->impl.image_initialized = true;
        }

        if (pixelBuffer != NULL) {
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
#ifdef KINC_OPENGL
            kinc_g4_texture_upload(&video->impl.image, (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer),
                                   (int)(CVPixelBufferGetBytesPerRow(pixelBuffer) / 4));
#else
            kinc_g4_texture_upload(&video->impl.image, (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer), (int)(CVPixelBufferGetBytesPerRow(pixelBuffer)));
#endif
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        }
        CFRelease(buffer);
    }

    if (video->impl.audioTrackOutput != NULL) {
        AVAssetReaderAudioMixOutput *audioOutput = video->impl.audioTrackOutput;
        while (video->impl.audioTime / 44100.0 < video->impl.next + 0.1) {
            CMSampleBufferRef buffer = [audioOutput copyNextSampleBuffer];
            if (!buffer)
                return;
            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(buffer);
            AudioBufferList audioBufferList;
            CMBlockBufferRef blockBufferOut = nil;
            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(buffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL,
                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBufferOut);
            for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; ++bufferCount) {
                float *samples = (float *)audioBufferList.mBuffers[bufferCount].mData;
                kinc_internal_video_sound_stream_t *sound = (kinc_internal_video_sound_stream_t *)video->impl.sound;
                if (video->impl.audioTime / 44100.0 > video->impl.next - 0.1) {
                    kinc_internal_video_sound_stream_insert_data(sound, samples, (int)numSamplesInBuffer * 2);
                }
                else {
                    // Send some data anyway because the buffers are huge
                    kinc_internal_video_sound_stream_insert_data(sound, samples, (int)numSamplesInBuffer);
                }
                video->impl.audioTime += numSamplesInBuffer;
            }
            CFRelease(blockBufferOut);
            CFRelease(buffer);
        }
    }
}

void kinc_video_update(kinc_video_t *video, double time) {
    if (video->impl.playing && time >= video->impl.start + video->impl.next) {
        updateImage(video);
    }
}

int kinc_video_width(kinc_video_t *video) {
    return video->impl.myWidth;
}

int kinc_video_height(kinc_video_t *video) {
    return video->impl.myHeight;
}

kinc_g4_texture_t *kinc_video_current_image(kinc_video_t *video) {
    kinc_video_update(video, kinc_time());
    return &video->impl.image;
}

double kinc_video_duration(kinc_video_t *video) {
    return video->impl.duration;
}

bool kinc_video_finished(kinc_video_t *video) {
    return video->impl.finished;
}

bool kinc_video_paused(kinc_video_t *video) {
    return !video->impl.playing;
}

double kinc_video_position(kinc_video_t *video) {
    return video->impl.next - video->impl.start;
}
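To round off the picture, an end-to-end usage sketch of the AVFoundation-backed player above; the file name and the per-frame hook are placeholders, not part of the commit.

#include <kinc/system.h>
#include <kinc/video.h>

static kinc_video_t video;

void start_video(void) {
    kinc_video_init(&video, "intro.mp4"); // resolved against the resource path and KINC_DEBUGDIR
    kinc_video_play(&video, false /* loop */);
}

void per_frame(void) {
    // Advances decoding as needed and returns the BGRA texture that
    // updateImage() uploads; draw it with the graphics4 API as usual.
    kinc_g4_texture_t *tex = kinc_video_current_image(&video);
    (void)tex;
    if (kinc_video_finished(&video)) {
        kinc_video_destroy(&video);
    }
}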