New file |
| | |
| | | /* |
| | | * Copyright (C) 2014 The Android Open Source Project |
| | | * |
| | | * Licensed under the Apache License, Version 2.0 (the "License"); |
| | | * you may not use this file except in compliance with the License. |
| | | * You may obtain a copy of the License at |
| | | * |
| | | * http://www.apache.org/licenses/LICENSE-2.0 |
| | | * |
| | | * Unless required by applicable law or agreed to in writing, software |
| | | * distributed under the License is distributed on an "AS IS" BASIS, |
| | | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| | | * See the License for the specific language governing permissions and |
| | | * limitations under the License. |
| | | */ |
| | | |
| | | /* This is a JNI example where we use native methods to play video |
| | | * using the native AMedia* APIs. |
| | | * See the corresponding Java source file located at: |
| | | * |
| | | * src/com/example/nativecodec/NativeMedia.java |
| | | * |
| | | * In this example we use assert() for "impossible" error conditions, |
| | | * and explicit handling and recovery for more likely error conditions. |
| | | */ |
| | | |
| | | #include <assert.h> |
| | | #include <jni.h> |
| | | #include <stdio.h> |
| | | #include <string.h> |
| | | #include <unistd.h> |
| | | #include <sys/types.h> |
| | | #include <sys/stat.h> |
| | | #include <fcntl.h> |
| | | #include <errno.h> |
| | | #include <limits.h> |
| | | #include <pthread.h> |
| | | |
| | | #include "looper.h" |
| | | #include "media/NdkMediaCodec.h" |
| | | #include "media/NdkMediaExtractor.h" |
| | | |
| | | #ifndef LOG_TAG |
| | | #define LOG_TAG "NativeCodec" |
| | | #endif |
| | | |
| | | #include <logger.h> |
| | | |
| | | #include <android/log.h> |
| | | |
| | | // for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message"); |
| | | #define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__) |
| | | #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) |
| | | |
| | | // for native window JNI |
| | | #include <android/native_window_jni.h> |
| | | #include <android/asset_manager.h> |
| | | #include <android/asset_manager_jni.h> |
| | | |
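| | | // Per-stream decoder state, shared between the JNI entry points, the looper |
| | | // thread (doCodecWork) and the live555 RTSP frame callback. |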
| | | typedef struct { |
| | | int fd; |
| | | ANativeWindow* window; |
| | | AMediaExtractor* ex; |
| | | AMediaCodec *codec; |
| | | int64_t renderstart; |
| | | bool sawInputEOS; |
| | | bool sawOutputEOS; |
| | | bool isPlaying; |
| | | bool renderonce; |
| | | } workerdata; |
| | | |
| | | workerdata data = {-1, NULL, NULL, NULL, 0, false, false, false, false}; |
| | | |
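| | | // Messages dispatched to mylooper::handle() on the looper thread. |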
| | | enum { |
| | | kMsgCodecBuffer, |
| | | kMsgPause, |
| | | kMsgResume, |
| | | kMsgPauseAck, |
| | | kMsgDecodeDone, |
| | | kMsgSeek |
| | | }; |
| | | |
| | | |
| | | |
| | | |
| | | #include <algorithm> // std::min |
| | | #include <iostream> |
| | | #include <sstream> |
| | | #include <Logger/src/logger.hpp> |
| | | std::stringstream logss; |
| | | Logger g_logger(logss); |
| | | |
| | | |
| | | #include <liveMedia/liveMedia.hh> |
| | | #include <BasicUsageEnvironment/BasicUsageEnvironment.hh> |
| | | |
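| | | // Callbacks invoked by the live555-based RTSP client (testRTSPClient.hpp, |
| | | // included below): SDP/fmtp description, received frames, and continuation. |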
| | | void rtsp_client_sdp_callback(void* arg, const char* val) |
| | | {} |
| | | |
| | | void rtsp_client_fmtp_callback(void* arg, const char* val) |
| | | { |
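| | | // Sketch only (not wired up): if the fmtp data carries sprop-parameter-sets, |
| | | // the base64-decoded SPS/PPS could be pushed to the decoder as a codec-config |
| | | // buffer before the first frame arrives, e.g.: |
| | | // |
| | | // ssize_t idx = AMediaCodec_dequeueInputBuffer(data.codec, 2000); |
| | | // if (idx >= 0) { |
| | | //     size_t cap; |
| | | //     uint8_t* in = AMediaCodec_getInputBuffer(data.codec, idx, &cap); |
| | | //     // write 0x00000001 + SPS + 0x00000001 + PPS into in (total csdSize bytes) |
| | | //     AMediaCodec_queueInputBuffer(data.codec, idx, 0, csdSize, 0, |
| | | //                                  AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG); |
| | | // } |
| | | // |
| | | // How `val` is formatted depends on the RTSP client, so this stays a comment. |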
| | | |
| | | } |
| | | |
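| | | // Receives one encoded access unit from the RTSP client and copies it into a |
| | | // MediaCodec input buffer; decoded output is drained by doCodecWork(). |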
| | | void rtsp_client_frame_callback(void* arg, uint8_t* buffer, size_t buffSize, timeval presentationTime) |
| | | { |
| | | ssize_t bufidx = -1; |
| | | |
| | | if (data.codec == NULL) |
| | | return; // decoder not created yet, or already shut down |
| | | |
| | | bufidx = AMediaCodec_dequeueInputBuffer(data.codec, 2000); |
| | | static int framecount = 0; |
| | | LOGV("input buffer bufidx=%zd, framecount=%d", bufidx, framecount++); |
| | | |
| | | if (bufidx >= 0) { |
| | | size_t bufsize; |
| | | uint8_t* buf = AMediaCodec_getInputBuffer(data.codec, bufidx, &bufsize); |
| | | if (buf == NULL) |
| | | return; |
| | | size_t sampleSize = std::min(bufsize, buffSize); // truncate if the frame exceeds the buffer capacity |
| | | memcpy(buf, buffer, sampleSize); |
| | | //auto sampleSize = AMediaExtractor_readSampleData(d->ex, buf, bufsize); |
| | | //if (sampleSize < 0) { |
| | | // sampleSize = 0; |
| | | // d->sawInputEOS = true; |
| | | // LOGV("EOS"); |
| | | //} |
| | | //auto presentationTimeUs = AMediaExtractor_getSampleTime(d->ex); |
| | | uint64_t presentationTimeUs = (uint64_t)presentationTime.tv_sec * 1000000 + presentationTime.tv_usec; // microseconds |
| | | |
| | | media_status_t ms = AMediaCodec_queueInputBuffer(data.codec, bufidx, 0, sampleSize, presentationTimeUs, 0); |
| | | if (ms != AMEDIA_OK) |
| | | LOGE("AMediaCodec_queueInputBuffer error: %d", ms); |
| | | //AMediaExtractor_advance(d->ex); |
| | | } |
| | | |
| | | |
| | | } |
| | | |
| | | void rtsp_client_continue_callback(void* arg) |
| | | {} |
| | | |
| | | struct PL_RTSPClient_Config |
| | | { |
| | | std::string progName; |
| | | std::string rtspURL; |
| | | bool aux; // frame data starts with the 0x00000001 start code |
| | | int verbosityLevel; |
| | | int tunnelOverHTTPPortNum; // portNumBits |
| | | void* args; |
| | | |
| | | PL_RTSPClient_Config() : |
| | | progName(), rtspURL() ,aux(true), verbosityLevel(1), tunnelOverHTTPPortNum(0), args(nullptr) |
| | | { } |
| | | }; |
| | | static PL_RTSPClient_Config rtspConfig; |
| | | |
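| | | // testRTSPClient.hpp provides usage() and openURL(), used by the RTSP daemon |
| | | // thread below to start streaming with the settings in rtspConfig. |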
| | | #include <live555/testProgs/testRTSPClient.hpp> |
| | | |
| | | |
| | | |
| | | |
| | | |
| | | |
| | | class mylooper: public looper { |
| | | virtual void handle(int what, void* obj); |
| | | }; |
| | | |
| | | static mylooper *mlooper = NULL; |
| | | |
| | | int64_t systemnanotime() { |
| | | timespec now; |
| | | clock_gettime(CLOCK_MONOTONIC, &now); |
| | | return now.tv_sec * 1000000000LL + now.tv_nsec; |
| | | } |
| | | |
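| | | // Feed one input buffer from the extractor (file playback path) and drain at |
| | | // most one output buffer, sleeping until its presentation time, then re-post |
| | | // kMsgCodecBuffer so the looper keeps the pipeline moving. |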
| | | void doCodecWork(workerdata *d) { |
| | | ssize_t bufidx = -1; |
| | | |
| | | // from file |
| | | if (!d->sawInputEOS) { |
| | | bufidx = AMediaCodec_dequeueInputBuffer(d->codec, 2000); |
| | | static int framecount = 0; |
| | | LOGV("input buffer bufidx=%zd, framecount=%d", bufidx, framecount++); |
| | | if (bufidx >= 0) { |
| | | size_t bufsize; |
| | | auto buf = AMediaCodec_getInputBuffer(d->codec, bufidx, &bufsize); |
| | | auto sampleSize = AMediaExtractor_readSampleData(d->ex, buf, bufsize); |
| | | if (sampleSize < 0) { |
| | | sampleSize = 0; |
| | | d->sawInputEOS = true; |
| | | LOGV("EOS"); |
| | | } |
| | | auto presentationTimeUs = AMediaExtractor_getSampleTime(d->ex); |
| | | |
| | | AMediaCodec_queueInputBuffer(d->codec, bufidx, 0, sampleSize, presentationTimeUs, |
| | | d->sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0); |
| | | AMediaExtractor_advance(d->ex); |
| | | } |
| | | } |
| | | |
| | | if (!d->sawOutputEOS) { |
| | | AMediaCodecBufferInfo info; |
| | | bufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, 0); // returns an output buffer index, or a negative info code |
| | | if (bufidx >= 0) { |
| | | if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) { |
| | | LOGV("output EOS"); |
| | | d->sawOutputEOS = true; |
| | | } |
| | | int64_t presentationNano = info.presentationTimeUs * 1000; |
| | | if (d->renderstart < 0) { |
| | | d->renderstart = systemnanotime() - presentationNano; |
| | | } |
| | | int64_t delay = (d->renderstart + presentationNano) - systemnanotime(); |
| | | if (delay > 0) { |
| | | usleep(delay / 1000); |
| | | } |
| | | |
| | | //AMediaCodec_getOutputBuffer |
| | | |
| | | AMediaCodec_releaseOutputBuffer(d->codec, bufidx, info.size != 0); |
| | | if (d->renderonce) { |
| | | d->renderonce = false; |
| | | return; |
| | | } |
| | | } else if (bufidx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) { |
| | | LOGV("output buffers changed"); |
| | | } else if (bufidx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) { |
| | | auto format = AMediaCodec_getOutputFormat(d->codec); |
| | | LOGV("format changed to: %s", AMediaFormat_toString(format)); |
| | | AMediaFormat_delete(format); |
| | | } else if (bufidx == AMEDIACODEC_INFO_TRY_AGAIN_LATER) { |
| | | //LOGV("no output buffer right now"); |
| | | } else { |
| | | LOGV("unexpected info code: %zd", bufidx); |
| | | } |
| | | } |
| | | |
| | | if (!d->sawInputEOS || !d->sawOutputEOS) { |
| | | mlooper->post(kMsgCodecBuffer, d); |
| | | } |
| | | } |
| | | |
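| | | // Dispatches player control messages on the looper thread. |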
| | | void mylooper::handle(int what, void* obj) { |
| | | switch (what) { |
| | | case kMsgCodecBuffer: |
| | | doCodecWork((workerdata*)obj); |
| | | break; |
| | | |
| | | case kMsgDecodeDone: |
| | | { |
| | | workerdata *d = (workerdata*)obj; |
| | | AMediaCodec_stop(d->codec); |
| | | AMediaCodec_delete(d->codec); |
| | | AMediaExtractor_delete(d->ex); |
| | | d->sawInputEOS = true; |
| | | d->sawOutputEOS = true; |
| | | } |
| | | break; |
| | | |
| | | case kMsgSeek: |
| | | { |
| | | workerdata *d = (workerdata*)obj; |
| | | AMediaExtractor_seekTo(d->ex, 0, AMEDIAEXTRACTOR_SEEK_NEXT_SYNC); |
| | | AMediaCodec_flush(d->codec); |
| | | d->renderstart = -1; |
| | | d->sawInputEOS = false; |
| | | d->sawOutputEOS = false; |
| | | if (!d->isPlaying) { |
| | | d->renderonce = true; |
| | | post(kMsgCodecBuffer, d); |
| | | } |
| | | LOGV("seeked"); |
| | | } |
| | | break; |
| | | |
| | | case kMsgPause: |
| | | { |
| | | workerdata *d = (workerdata*)obj; |
| | | if (d->isPlaying) { |
| | | // flush all outstanding codecbuffer messages with a no-op message |
| | | d->isPlaying = false; |
| | | post(kMsgPauseAck, NULL, true); |
| | | } |
| | | } |
| | | break; |
| | | |
| | | case kMsgResume: |
| | | { |
| | | workerdata *d = (workerdata*)obj; |
| | | if (!d->isPlaying) { |
| | | d->renderstart = -1; |
| | | d->isPlaying = true; |
| | | post(kMsgCodecBuffer, d); |
| | | } |
| | | } |
| | | break; |
| | | } |
| | | } |
| | | |
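| | | // Runs the live555 event loop on its own thread; all rtsp_client_* callbacks |
| | | // above are invoked from here. |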
| | | static void* live_daemon_thd(void* arg) |
| | | { |
| | | TaskScheduler* scheduler = BasicTaskScheduler::createNew(); |
| | | UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler); |
| | | |
| | | usage(*env, rtspConfig.progName.c_str()); |
| | | |
| | | openURL(*env, rtspConfig); |
| | | |
| | | // doEventLoop() blocks until eventLoopWatchVariable becomes non-zero |
| | | char eventLoopWatchVariable = 0; |
| | | env->taskScheduler().doEventLoop(&eventLoopWatchVariable); |
| | | |
| | | env->reclaim(); |
| | | delete scheduler; |
| | | return NULL; |
| | | } |
| | | |
| | | extern "C" { |
| | | |
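| | | // RTSP path: configure an AVC decoder up front (dimensions are hard-coded to |
| | | // 1920x1080 here) and let the live555 thread feed it. |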
| | | jboolean Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer_rtsp(JNIEnv* env, |
| | | jclass clazz, jobject assetMgr, jstring filename) { |
| | | LOGV("@@@ create"); |
| | | |
| | | // convert Java string to UTF-8 |
| | | const char *utf8 = env->GetStringUTFChars(filename, NULL); |
| | | LOGV("opening %s", utf8); |
| | | |
| | | rtspConfig.progName = "RtspFace"; |
| | | rtspConfig.rtspURL = utf8; |
| | | rtspConfig.aux = true; // prepend the 0x00000001 start code to each frame (ffmpeg needs it; live555 does not add it) |
| | | rtspConfig.verbosityLevel = 1; |
| | | rtspConfig.tunnelOverHTTPPortNum = 0; |
| | | rtspConfig.args = nullptr; |
| | | |
| | | env->ReleaseStringUTFChars(filename, utf8); |
| | | |
| | | //AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i); |
| | | AMediaFormat* format = AMediaFormat_new(); |
| | | //MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime, width, height); |
| | | |
| | | AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc"); |
| | | AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, 1080); |
| | | AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, 1920); |
| | | |
| | | //const uint8_t sps[] = {0x67, 0x4d, 0x00, 0x2a, 0x95, 0xa8, 0x1e, 0x00, 0x89, 0xf9, 0x61, 0x00, 0x00, 0x07, 0x08, 0x00, 0x01, 0x5f, 0x90, 0x04}; |
| | | //const uint8_t pps[] = {0x68, 0xee, 0x3c, 0x80}; |
| | | //AMediaFormat_setBuffer(format, "csd-0", (void*)sps, sizeof(sps)); // sps |
| | | //AMediaFormat_setBuffer(format, "csd-1", (void*)pps, sizeof(pps)); // pps |
| | | |
| | | // the logged format should look something like: |
| | | // {mime: string(video/avc), durationUs: int64(10000000), width: int32(480), height: int32(360), max-input-size: int32(55067), csd-0: data, csd-1: data} |
| | | LOGV("AMediaFormat_toString: %s", AMediaFormat_toString(format)); |
| | | |
| | | AMediaCodec *codec = AMediaCodec_createDecoderByType("video/avc"); |
| | | if (codec == NULL) { |
| | | LOGE("AMediaCodec_createDecoderByType error"); |
| | | AMediaFormat_delete(format); |
| | | return JNI_FALSE; |
| | | } |
| | | if (AMediaCodec_configure(codec, format, data.window, NULL, 0) != AMEDIA_OK) |
| | | LOGE("AMediaCodec_configure error"); |
| | | data.ex = NULL; |
| | | data.codec = codec; |
| | | data.renderstart = -1; |
| | | data.sawInputEOS = true; // input is fed by the RTSP frame callback, not an extractor |
| | | data.sawOutputEOS = false; |
| | | data.isPlaying = false; |
| | | data.renderonce = true; |
| | | if (AMediaCodec_start(codec) != AMEDIA_OK) |
| | | LOGE("AMediaCodec_start error"); |
| | | |
| | | AMediaFormat_delete(format); |
| | | |
| | | mlooper = new mylooper(); |
| | | mlooper->post(kMsgCodecBuffer, &data); |
| | | |
| | | pthread_t live_daemon_thid; |
| | | int ret = pthread_create(&live_daemon_thid, NULL, live_daemon_thd, NULL); |
| | | if (ret != 0) |
| | | LOGE("pthread_create error: %d", ret); |
| | | |
| | | return JNI_TRUE; |
| | | } |
| | | |
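| | | // Entry point used by the Java side: rtsp:// URLs are handed to the RTSP path |
| | | // above, anything else is opened as an asset and demuxed with AMediaExtractor. |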
| | | jboolean Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer(JNIEnv* env, |
| | | jclass clazz, jobject assetMgr, jstring filename) |
| | | { |
| | | LOGV("@@@ create"); |
| | | |
| | | // convert Java string to UTF-8 |
| | | const char *utf8 = env->GetStringUTFChars(filename, NULL); |
| | | LOGV("opening %s", utf8); |
| | | |
| | | if (strncmp(utf8, "rtsp://", 7) == 0) { |
| | | env->ReleaseStringUTFChars(filename, utf8); |
| | | return Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer_rtsp(env, clazz, assetMgr, filename); |
| | | } |
| | | |
| | | off_t outStart, outLen; |
| | | AAsset *asset = AAssetManager_open(AAssetManager_fromJava(env, assetMgr), utf8, AASSET_MODE_UNKNOWN); |
| | | if (asset == NULL) { |
| | | LOGE("failed to open asset: %s", utf8); |
| | | env->ReleaseStringUTFChars(filename, utf8); |
| | | return JNI_FALSE; |
| | | } |
| | | int fd = AAsset_openFileDescriptor(asset, &outStart, &outLen); |
| | | AAsset_close(asset); // the dup'ed fd is independent of the asset |
| | | |
| | | if (fd < 0) { |
| | | LOGE("failed to open file: %s %d (%s)", utf8, fd, strerror(errno)); |
| | | env->ReleaseStringUTFChars(filename, utf8); |
| | | return JNI_FALSE; |
| | | } |
| | | env->ReleaseStringUTFChars(filename, utf8); |
| | | |
| | | data.fd = fd; |
| | | |
| | | workerdata *d = &data; |
| | | |
| | | AMediaExtractor *ex = AMediaExtractor_new(); |
| | | media_status_t err = AMediaExtractor_setDataSourceFd(ex, d->fd, |
| | | static_cast<off64_t>(outStart), |
| | | static_cast<off64_t>(outLen)); |
| | | close(d->fd); |
| | | if (err != AMEDIA_OK) { |
| | | LOGV("setDataSource error: %d", err); |
| | | return JNI_FALSE; |
| | | } |
| | | |
| | | int numtracks = AMediaExtractor_getTrackCount(ex); |
| | | |
| | | AMediaCodec *codec = NULL; |
| | | |
| | | LOGV("input has %d tracks", numtracks); |
| | | for (int i = 0; i < numtracks; i++) { |
| | | AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i); |
| | | const char *s = AMediaFormat_toString(format); |
| | | LOGV("track %d format: %s", i, s); |
| | | const char *mime; |
| | | if (!AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime)) { |
| | | LOGV("no mime type"); |
| | | return JNI_FALSE; |
| | | } else if (!strncmp(mime, "video/", 6)) { |
| | | // Omitting most error handling for clarity. |
| | | // Production code should check for errors. |
| | | AMediaExtractor_selectTrack(ex, i); |
| | | codec = AMediaCodec_createDecoderByType(mime); |
| | | AMediaCodec_configure(codec, format, d->window, NULL, 0); |
| | | d->ex = ex; |
| | | d->codec = codec; |
| | | d->renderstart = -1; |
| | | d->sawInputEOS = false; |
| | | d->sawOutputEOS = false; |
| | | d->isPlaying = false; |
| | | d->renderonce = true; |
| | | AMediaCodec_start(codec); |
| | | } |
| | | AMediaFormat_delete(format); |
| | | } |
| | | |
| | | mlooper = new mylooper(); |
| | | mlooper->post(kMsgCodecBuffer, d); |
| | | |
| | | return JNI_TRUE; |
| | | } |
| | | |
| | | // set the playing state for the streaming media player |
| | | //void Java_com_example_nativecodec_NativeCodec_setPlayingStreamingMediaPlayer(JNIEnv* env, |
| | | // jclass clazz, jboolean isPlaying) |
| | | //{ |
| | | // LOGV("@@@ playpause: %d", isPlaying); |
| | | // if (mlooper) { |
| | | // if (isPlaying) { |
| | | // mlooper->post(kMsgResume, &data); |
| | | // } else { |
| | | // mlooper->post(kMsgPause, &data); |
| | | // } |
| | | // } |
| | | //} |
| | | |
| | | |
| | | // rewind the streaming media player |
| | | //void Java_RtspNativeCodec_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz) |
| | | //{ |
| | | // LOGV("@@@ rewind"); |
| | | // if (mlooper) { |
| | | // mlooper->post(kMsgSeek, &data); |
| | | // } |
| | | //} |
| | | |
| | | // set the surface |
| | | void Java_RtspNativeCodec_setSurface(JNIEnv *env, jclass clazz, jint cameraIdx, jobject surface) |
| | | { |
| | | LOGV("@@@ Java_RtspNativeCodec_setSurface"); |
| | | |
| | | // obtain a native window from a Java surface |
| | | if (data.window) { |
| | | ANativeWindow_release(data.window); |
| | | data.window = NULL; |
| | | } |
| | | data.window = ANativeWindow_fromSurface(env, surface); |
| | | LOGV("@@@ setsurface %p", data.window); |
| | | } |
| | | |
| | | jboolean Java_RtspNativeCodec_createPlayer(JNIEnv* env, jclass clazz, jobject assetMgr, jint cameraIdx, jstring uri) |
| | | { |
| | | LOGV("@@@ Java_RtspNativeCodec_createPlayer"); |
| | | |
| | | return JNI_TRUE; |
| | | } |
| | | |
| | | // shut down the native media system |
| | | void Java_RtspNativeCodec_shutdown(JNIEnv* env, jclass clazz, jint cameraIdx) |
| | | { |
| | | LOGV("@@@ Java_RtspNativeCodec_shutdown"); |
| | | if (mlooper) { |
| | | mlooper->post(kMsgDecodeDone, &data, true /* flush */); |
| | | mlooper->quit(); |
| | | delete mlooper; |
| | | mlooper = NULL; |
| | | } |
| | | if (data.window) { |
| | | ANativeWindow_release(data.window); |
| | | data.window = NULL; |
| | | } |
| | | } |
| | | |
| | | jboolean Java_RtspNativeCodec_setFaceCallback(JNIEnv* env, jclass clazz, jobject assetMgr, jint cameraIdx, jstring func) |
| | | { |
| | | return JNI_TRUE; |
| | | } |
| | | |
| | | jboolean Java_RtspNativeCodec_getFaceList(JNIEnv* env, jclass clazz, jobject assetMgr, jint cameraIdx, jobject faceList) |
| | | { |
| | | return JNI_TRUE; |
| | | } |
| | | |
| | | } |