Commit c1a69aae authored by Linshizhi

update

parent fc687dec
[submodule "lib/ffmpeg.protos"]
path = lib/ffmpeg.protos
url = git@gitlab.ilaihua.com:linshizhi/ffmpeg.sharedmemoryproto.git
[submodule "lib/ffmpeg.wasm-core"]
path = lib/ffmpeg.wasm-core
url = git@gitlab.ilaihua.com:linshizhi/ffmpeg.wasm-core.git
ffmpeg.protos @ ed4e7ded
Subproject commit ed4e7ded6e361f8a6ac9e0adfd31cd7e75aed157
......@@ -24,7 +24,7 @@ self.importScripts('defs.js');
// Load wasm encoder
self.importScripts('paraencoder.js');
createParaEncoder().then(m => {
createEncoder().then(m => {
encoder = m;
});
......
......@@ -27,8 +27,8 @@ let muxBuffer = null;
self.importScripts('defs.js');
// Load wasm encoder
self.importScripts('./paraencoder.js');
createParaEncoder().then(m => {
self.importScripts('./paramuxer.js');
createMuxer().then(m => {
muxer = m;
});
......@@ -129,7 +129,7 @@ async function step() {
// Check EOF
let flag = chn.readPriv();
if (flag & PRIV_FLAGS.EOF) {
console.log("MUXER: EOF " + i)
console.log("MUXER: EOF " + i);
muxer._eof(i);
++eofCount;
......
......@@ -173,15 +173,16 @@ export class H264EncWWGroup extends WWGroup {
console.log("Ready to push...");
while (this.#channels[this.#curProcWW].push(rgbFrame) == false) {
console.log("Pushing...");
await sleep(300);
}
if (!this.#channels[this.#curProcWW].isSetPriv(PRIV_FLAGS.EXECUTING)) {
this.#encWorkers[this.#curProcWW].postMessage(
makeMsg(MESSAGE_TYPE.DATA, {}))
}
while (this.#channels[this.#curProcWW].push(rgbFrame) == false) {
console.log("Pushing...");
await sleep(300);
}
this.#curProcWW = (this.#curProcWW + 1) % this.#numOfEncWorker;
}
......
#include <stdio.h>
#include <stdint.h>
#include <malloc.h>
#include "wasm.h"
#include <iostream>
#include <chrono>
#include <ctime>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/avutil.h>
}
///////////////////////////////////////////////////////////////////////////////
// Encoding Part //
///////////////////////////////////////////////////////////////////////////////
static int width_ = 0;
static int height_ = 0;
static int framerate_ = 0;
static int timescale = 90000;
static AVFrame *rgbaFrame;
static unsigned frameIndex = 0;
static AVFrame *frame;
static AVPacket *packet;
static AVCodecContext *cc;
static struct SwsContext* swsCtx = NULL;
static AVPacket *lastPkt = NULL;
// Bitstream Filter
static AVBSFContext *bsCtx = NULL;
static const AVBitStreamFilter *bsFilter = NULL;
EM_PORT_API(int) getPackets(uint8_t *buffer, uint32_t size, uint32_t *osize);
EM_PORT_API(uint8_t) encodeInit(int width, int height, int fps) {
int ret = 0;
width_ = width;
height_ = height;
framerate_ = fps;
const AVCodec *encoder = avcodec_find_encoder_by_name("libx264");
if (encoder == NULL) {
fprintf(stderr, "Unable to find H.264 decoder\n");
return 1;
}
cc = avcodec_alloc_context3(encoder);
if (cc == NULL) {
fprintf(stderr, "Unable to alloc codec context\n");
return 2;
}
// Setup encode parameters
cc->width = width;
cc->height = height;
cc->pix_fmt = AV_PIX_FMT_YUV420P;
cc->time_base = (AVRational){1, timescale};
cc->gop_size = 0;
if ((ret = avcodec_open2(cc, encoder, NULL)) < 0) {
fprintf(stderr, "Unable to open codec context: %s\n",
av_err2str(ret));
return 3;
}
packet = av_packet_alloc();
if (packet == NULL) {
fprintf(stderr, "Could not allocate packet\n");
}
frame = av_frame_alloc();
frame->format = cc->pix_fmt;
frame->width = cc->width;
frame->height = cc->height;
ret = av_frame_get_buffer(frame, 0);
if (ret < 0) {
fprintf(stderr, "Could not allocate the video frame data\n");
return 4;
}
swsCtx = sws_getCachedContext(swsCtx, width, height, AV_PIX_FMT_RGBA,
width, height, AV_PIX_FMT_YUV420P,
SWS_BICUBIC,
NULL, NULL, NULL);
// Init bitstream filter
bsFilter = av_bsf_get_by_name("h264_mp4toannexb");
if (bsFilter == NULL) {
printf("Fail to get h264_mp4toannexb\n");
return 5;
}
av_bsf_alloc(bsFilter, &bsCtx);
if (bsCtx == NULL) {
printf("Fail to alloc bitstream filter context\n");
return 6;
}
avcodec_parameters_from_context(bsCtx->par_in, cc);
avcodec_parameters_from_context(bsCtx->par_out, cc);
if (av_bsf_init(bsCtx) < 0) {
printf("failed to init bitstream filter context\n");
return 7;
}
return 0;
}
static std::chrono::duration<double> total;
/* Ret Values:
* 0: Success
* 1: AGAIN
* 2: Buffer too small
* 3: ERROR */
EM_PORT_API(int) encode(uint8_t *data, uint8_t *buffer, uint32_t size, uint32_t *osize) {
int ret = 0;
if (av_frame_make_writable(frame) < 0) {
fprintf(stderr, "Fail to make frame writable\n");
}
rgbaFrame = av_frame_alloc();
rgbaFrame->format = AV_PIX_FMT_RGBA;
rgbaFrame->height = cc->height;
rgbaFrame->width = cc->width;
avpicture_fill((AVPicture*)rgbaFrame, data, AV_PIX_FMT_RGBA, width_, height_);
// The converted YUV420P data is stored in 'frame'
int outSliceH = sws_scale(swsCtx, (const uint8_t* const*)rgbaFrame->data, rgbaFrame->linesize, 0, height_,
frame->data, frame->linesize);
// Release the wrapper frame; it does not own the RGBA buffer
av_frame_free(&rgbaFrame);
if (outSliceH <= 0) {
printf("outSliceH <= 0 \n");
return 3;
}
frame->pts = timescale / framerate_ * frameIndex;
frame->pict_type = AV_PICTURE_TYPE_I;
++frameIndex;
auto start = std::chrono::system_clock::now();
// Encode
ret = avcodec_send_frame(cc, frame);
if (ret < 0) {
fprintf(stderr, "Fail to encoding\n");
}
ret = getPackets(buffer, size, osize);
auto end = std::chrono::system_clock::now();
std::chrono::duration<double> elapsed_seconds = end - start;
total += elapsed_seconds;
std::time_t end_time = std::chrono::system_clock::to_time_t(end);
std::cout << "finished computation at " << std::ctime(&end_time)
<< "elapsed time: " << elapsed_seconds.count() << "s"
<< std::endl
<< "total: " << total.count()
<< std::endl;
return ret;
}
/* Ret Values:
* 0: Success
* 1: AGAIN or EOF
* 2: Buffer too small
* 3: ERROR */
EM_PORT_API(int) getPackets(uint8_t *buffer, uint32_t size, uint32_t *osize) {
int ret = 0;
uint8_t *pos = buffer;
int remainSize = size;
*osize = 0;
if (lastPkt != NULL && lastPkt->size <= remainSize) {
memcpy(pos, lastPkt->data, lastPkt->size);
pos += lastPkt->size;
remainSize -= lastPkt->size;
av_packet_unref(lastPkt);
lastPkt = NULL;
} else if (lastPkt != NULL) {
/* Buffer is too small to contain the packet */
return 2;
}
while (true) {
ret = avcodec_receive_packet(cc, packet);
if (ret < 0) {
ret = 1;
goto DONE;
}
printf("WASM Encode: Packet Size %d\n", packet->size);
// For a single video frame, avcodec_receive_packet()
// normally returns only one packet.
if (remainSize >= packet->size) {
memcpy(pos, packet->data, packet->size);
pos += packet->size;
remainSize -= packet->size;
} else {
lastPkt = packet;
break;
}
av_packet_unref(packet);
}
DONE:
*osize = size - remainSize;
return ret;
}
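As a concrete (hypothetical) example of the hand-off above: if the encoder emits a 70 KB packet while only 64 KB remain in the caller's buffer, the packet is parked in lastPkt and getPackets() returns with *osize covering only the packets already copied; the next call emits the parked packet first, or returns 2 if the fresh buffer still cannot hold it.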
EM_PORT_API(int) flushEncoder() {
if (avcodec_send_frame(cc, NULL) < 0) {
return 1;
}
return 0;
}
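For orientation, a caller drives the encoder exports above roughly as sketched below. This is a minimal illustrative sketch only, not part of the commit or of build.sh: frameSource(), the 1 MB output buffer and NUM_FRAMES are made-up placeholders, and in the project the calls are issued from paraencoder.js through the createEncoder module.

// Illustrative sketch only; frameSource() is a hypothetical RGBA provider.
extern uint8_t *frameSource(unsigned idx);

static int driveEncoder() {
    static uint8_t out[1 << 20];      // assumed 1 MB output buffer
    const unsigned NUM_FRAMES = 300;  // assumed frame count
    uint32_t osize = 0;
    if (encodeInit(1920, 1080, 30) != 0)
        return -1;
    for (unsigned i = 0; i < NUM_FRAMES; ++i) {
        // encode(): 0 = packets copied, 1 = encoder wants more input,
        // 2 = output buffer too small, 3 = error.
        int ret = encode(frameSource(i), out, (uint32_t)sizeof(out), &osize);
        if (ret == 3)
            return -1;
        if (osize > 0) { /* forward out[0..osize) to the muxer channel */ }
    }
    // Drain: send the NULL frame, then pull packets until getPackets()
    // reports AGAIN/EOF (return value 1).
    flushEncoder();
    int ret;
    do {
        ret = getPackets(out, (uint32_t)sizeof(out), &osize);
        if (osize > 0) { /* forward the flushed packets as well */ }
    } while (ret == 0);
    return 0;
}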
///////////////////////////////////////////////////////////////////////////////
// Muxing Parts //
///////////////////////////////////////////////////////////////////////////////
#include "ioctx.h"
#include "proto/movMemProto.h"
#include <vector>
#include <array>
#include <queue>
#include <memory>
using IOEndpoint = IOProto::MovMemProto::MovMemProto;
namespace MuxEnv {
size_t numOfPackets_;
class PacketBuffer {
public:
using PacketChain = std::queue<AVPacket*>;
PacketBuffer() = default;
PacketBuffer(int numOfStream) {
for (int i = 0; i < numOfStream; ++i) {
packets.emplace_back(PacketChain());
}
}
void push(int idx, AVPacket *packet) {
printf("PacketBuffer: Push\n");
packets[idx].push(packet);
++numOfPackets_;
}
AVPacket* pop(int idx) {
printf("PacketBuffer: POP\n");
auto &chain = packets[idx];
if (chain.empty()) {
return nullptr;
}
auto pkt = chain.front();
chain.pop();
numOfPackets_--;
return pkt;
}
int size(int idx) {
return packets[idx].size();
}
int numOfPkts() {
return numOfPackets_;
}
private:
std::vector<PacketChain> packets;
};
bool ioCtxInited = false;
int numOfStreams = 0;
int numOfStreams1 = 0;
PacketBuffer pktBuffer;
int finishedCount = 0;
int initedCount = 0;
bool *finished;
bool *inited;
bool *failed;
bool oCtxInited = false;
std::vector<IOEndpoint> protos;
IOCtx::InCtx **ctxs = nullptr;
constexpr size_t PKT_BUF_UPPER_BOUND = 300;
int currentChannel = 0;
const char *outputPath = "/tmp/output.mp4";
IOCtx::OutCtx oCtx { outputPath };
bool done = false;
/* Timestamp parameters */
int timescale = 90000;
AVRational timebase = av_make_q(1, timescale);
int framerate = 30;
};
EM_PORT_API(int) muxInit(int numOfStreams) {
// Setup MuxEnv Status
MuxEnv::numOfStreams = numOfStreams;
MuxEnv::numOfStreams1 = numOfStreams;
printf("Numb of Streams %d\n", MuxEnv::numOfStreams);
MuxEnv::pktBuffer = MuxEnv::PacketBuffer(MuxEnv::numOfStreams);
MuxEnv::finished = new bool[MuxEnv::numOfStreams]{false};
MuxEnv::inited = new bool[MuxEnv::numOfStreams]{false};
MuxEnv::failed = new bool[MuxEnv::numOfStreams]{false};
MuxEnv::ctxs = (IOCtx::InCtx**)malloc(sizeof(IOCtx::InCtx*) * numOfStreams);
for (int i = 0; i < numOfStreams; ++i) {
// Create an IOEndpoint for each IOCtx
MuxEnv::protos.emplace_back(IOEndpoint(nullptr, IOProto::read));
}
return 0;
}
EM_PORT_API(int) muxPush(int sIdx, uint8_t *data, size_t size) {
printf("MUX WASM: Push to proto %d: %p,%ld\n", sIdx, data, size);
MuxEnv::protos[sIdx%MuxEnv::numOfStreams1].push(data, size);
return 0;
}
int bufferPackets() {
int ret = 0, total = 0;
AVPacket *pkt = nullptr;
while (true) {
unsigned readed = 0;
// Read one packet from each active stream
// and buffer it.
for (int i = 0; i < MuxEnv::numOfStreams; ++i) {
if (MuxEnv::failed[i] || MuxEnv::finished[i] || !MuxEnv::inited[i]) {
printf("WASM: SKIP %d\n", i);
continue;
}
IOCtx::InCtx *c = MuxEnv::ctxs[i];
pkt = pkt == nullptr ? av_packet_alloc() : pkt;
ret = c->readFrame_(pkt);
if (ret < 0) {
if (ret == AVERROR_EOF) {
MuxEnv::finishedCount++;
MuxEnv::finished[i] = true;
printf("WASM: EOF %d\n", i);
if (MuxEnv::finishedCount == MuxEnv::numOfStreams) {
AVPacket *nullpkt = av_packet_alloc();
nullpkt->data = nullptr;
nullpkt->size = 0;
MuxEnv::finished[i] = false;
//MuxEnv::pktBuffer.push(i, nullpkt);
}
goto END_LOOP;
} else if (ret != AVERROR(EAGAIN)) {
MuxEnv::failed[i] = true;
}
continue;
}
printf("BUFFER PACKET\n");
MuxEnv::pktBuffer.push(i, pkt);
pkt = nullptr;
++readed;
}
total += readed;
// Exit the loop if too few packets could be read from the
// encoders, or if more than 'PKT_BUF_UPPER_BOUND' packets
// already reside in the packet buffer.
if (readed < MuxEnv::numOfStreams ||
total >= MuxEnv::PKT_BUF_UPPER_BOUND) {
break;
}
END_LOOP:;
}
return total;
}
void ioCtxInitialize() {
for (int i = 0; i < MuxEnv::numOfStreams; ++i) {
// IOProto requires about 20KB to probe stream information.
if (!MuxEnv::inited[i] && MuxEnv::protos[i].size() > 25000) {
MuxEnv::ctxs[i] = new IOCtx::InCtx{"", &MuxEnv::protos[i]};
MuxEnv::inited[i] = true;
if (!MuxEnv::oCtxInited) {
AVStream *s = MuxEnv::ctxs[i]->getStream([](AVStream *s) {
return s->codecpar->codec_type == AVMEDIA_TYPE_VIDEO;
});
MuxEnv::oCtx.newStream(s->codecpar);
MuxEnv::oCtx.writeHeader();
MuxEnv::oCtxInited = true;
}
printf("INIT %d\n", i);
++MuxEnv::initedCount;
}
}
}
void timestampSetup(AVPacket *pkt) {
static int ptsVal = 0, dtsVal = 0, duration = 0;
if (duration == 0) {
duration = MuxEnv::timescale / MuxEnv::framerate;
}
if (ptsVal == 0 && dtsVal == 0) {
dtsVal = -duration;
}
pkt->pts = ptsVal;
pkt->dts = dtsVal;
pkt->duration = duration;
pkt->stream_index = 0;
pkt->pos = -1;
printf("PTS: %d\n, DTS: %d\n", ptsVal, dtsVal);
ptsVal += duration;
dtsVal += duration;
}
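As a worked example of the scheme above, with the defaults timescale = 90000 and framerate = 30: duration = 90000 / 30 = 3000 ticks, so successive packets get (pts, dts) = (0, -3000), (3000, 0), (6000, 3000), and so on; dts stays strictly increasing and always trails pts by exactly one frame duration.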
int writeToOutput() {
int i = MuxEnv::currentChannel;
// Write out to file
while (true) {
AVPacket *p = MuxEnv::pktBuffer.pop(i);
printf("WriteToOutput(): Index %d\n", i);
if (p == nullptr) {
if (MuxEnv::finished[i]) {
printf("WriteToOutput(): Fin\n");
goto NEXT;
} else {
printf("WriteToOutput(): END LOOP\n");
goto END_LOOP;
}
} else if (p->data == nullptr && p->size == 0) {
MuxEnv::finished[i] = true;
goto MUX_DONE;
}
timestampSetup(p);
// Write to output context
MuxEnv::oCtx.writeFrame(p);
printf("WriteToOutput(): Index %d...Done\n", i);
// Cleaning
av_packet_free(&p);
NEXT:
i = (i + 1) % MuxEnv::numOfStreams;
}
END_LOOP:
MuxEnv::currentChannel = i;
return 0;
MUX_DONE:
MuxEnv::done = true;
MuxEnv::oCtx.writeTrailer();
return 1;
}
EM_PORT_API(int) muxStep() {
// Buffer Encoded frames from
// Encoder into 'MuxEnv::pktBuffer'
bufferPackets();
printf("%d packets are buffered\n", MuxEnv::pktBuffer.numOfPkts());
// Initialize IOCtx
if (MuxEnv::initedCount < MuxEnv::numOfStreams)
ioCtxInitialize();
if (MuxEnv::initedCount == MuxEnv::numOfStreams)
// Try to write to output file
return writeToOutput();
else
return 0;
}
EM_PORT_API(int) isDone() {
return MuxEnv::done;
}
EM_PORT_API(void) eof(int i) {
MuxEnv::protos[i].eof();
}
int main(int argc, char *argv[]) {}
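The muxer exports above follow a simple polling protocol: call muxInit() once, feed encoded H.264 data per stream with muxPush(), call muxStep() repeatedly, signal eof() when a stream ends, and watch isDone() for the trailer being written. A minimal driver sketch follows; it is illustrative only and not part of the commit: nextChunk(), chunkSize() and chunksPerStream are hypothetical stand-ins for data arriving on the worker channels, which paramuxer.js handles in the real pipeline.

// Illustrative driver sketch only; nextChunk()/chunkSize() are hypothetical.
extern uint8_t *nextChunk(int stream);   // next encoded H.264 chunk
extern size_t   chunkSize(int stream);   // size of that chunk

static void driveMuxer(int numOfStreams, int chunksPerStream) {
    muxInit(numOfStreams);
    // Feed the encoded data round-robin and let the muxer probe and
    // initialize its input contexts, then write packets as it goes.
    for (int n = 0; n < chunksPerStream; ++n) {
        for (int s = 0; s < numOfStreams; ++s)
            muxPush(s, nextChunk(s), chunkSize(s));
        muxStep();
    }
    // Signal end-of-stream on every input, then keep stepping until
    // the trailer has been written.
    for (int s = 0; s < numOfStreams; ++s)
        eof(s);
    while (!isDone())
        muxStep();
}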
......@@ -15,7 +15,7 @@ describe("H264EncWWGroup Spec", () => {
const RGBAFrameSize = 1920*1080*4;
let grp = new H264EncWWGroup("h264enc", {
numOfWW: 8,
numOfWW: 11,
encchnlsize: RGBAFrameSize * 10,
bridgechnlsize: Math.pow(2, 25)
});
......
......@@ -18,7 +18,7 @@ FFMPEG_PROTO=${WORKPATH}/src/protos/src
WASM_DIR=${WORKPATH}/src/wasms
#DEBUG="-O2"
DEBUG="-g2 -gseparate-dwarf=./temp.debug.wasm -s SEPARATE_DWARF_URL=http://localhost:9876/temp.debug.wasm"
DEBUG="-g2"
BUILD_DIR=${WORKPATH}/Build
......@@ -68,24 +68,66 @@ fi
OPTIM_FLAGS="$DEBUG --closure 0"
if [[ "$FFMPEG_ST" != "yes" ]]; then
EXTRA_FLAGS=(
EXTRA_FLAGS_ENCODER=(
-pthread
-s USE_PTHREADS=1 # enable pthreads support
-s PROXY_TO_PTHREAD=1 # detach main() from browser/UI main thread
-o ${DEMO_PATH}/resources/workers/paraencoder.js
)
EXTRA_FLAGS_MUXER=(
-pthread
-s USE_PTHREADS=1 # enable pthreads support
-s PROXY_TO_PTHREAD=1 # detach main() from browser/UI main thread
-o ${DEMO_PATH}/resources/workers/paramuxer.js
)
else
EXTRA_FLAGS=(
EXTRA_FLAGS_ENCODER=(
-o ${DEMO_PATH}/resources/workers/paraencoder.js
)
EXTRA_FLAGS_MUXER=(
-o ${DEMO_PATH}/resources/workers/paramuxer.js
)
fi
FLAGS=(
FLAGS_ENCODER=(
-I$BUILD_DIR/include -L$BUILD_DIR/lib -I$LIB_DIR/ffmpeg.wasm-core -I$LIB_DIR/ffmpeg.protos/src
-Wno-deprecated-declarations -Wno-pointer-sign -Wno-implicit-int-float-conversion -Wno-switch -Wno-parentheses -Qunused-arguments
-lavdevice -lavfilter -lavformat -lavcodec -lswresample -lswscale -lavutil -lpostproc -lm -lx264 -lz
$WASM_DIR/encoder.c
$LIB_DIR/ffmpeg.protos/src/ioctx.cc
$LIB_DIR/ffmpeg.protos/src/utils.cc
$LIB_DIR/ffmpeg.protos/src/proto/movMemProto.cc
$LIB_DIR/ffmpeg.protos/src/proto/proto.cc
-s FORCE_FILESYSTEM=1
-s WASM=1
#-s USE_SDL=0 # use SDL2
-s INVOKE_RUN=0 # not to run the main() in the beginning
-s EXIT_RUNTIME=1 # exit runtime after execution
-s MODULARIZE=1 # lazy loading; use modularized version to be more flexible
-s EXPORT_NAME="createEncoder" # assign export name for browser
-s EXPORTED_FUNCTIONS="[_main,_malloc,_free]" # export main and proxy_main funcs
-s EXPORTED_RUNTIME_METHODS="[FS, cwrap, ccall, setValue, writeAsciiToMemory, getValue]" # export preamble funcs
-s INITIAL_MEMORY=268435456 # 64 KB * 1024 * 16 * 2047 = 2146435072 bytes ~= 2 GB, 268435456 =256M, 134,217,728 =128M
--pre-js $WORKPATH/pre.js
--post-js $WORKPATH/post.js
-gseparate-dwarf=./encoder.debug.wasm -s SEPARATE_DWARF_URL=http://localhost:9876/encoder.debug.wasm
$OPTIM_FLAGS
${EXTRA_FLAGS_ENCODER[@]}
)
FLAGS_MUXER=(
-I$BUILD_DIR/include -L$BUILD_DIR/lib -I$LIB_DIR/ffmpeg.wasm-core -I$LIB_DIR/ffmpeg.protos/src
-Wno-deprecated-declarations -Wno-pointer-sign -Wno-implicit-int-float-conversion -Wno-switch -Wno-parentheses -Qunused-arguments
-lavdevice -lavfilter -lavformat -lavcodec -lswresample -lswscale -lavutil -lpostproc -lm -lx264 -lz
$WASM_DIR/interfaces.cc
$WASM_DIR/muxer.cc
$LIB_DIR/ffmpeg.protos/src/ioctx.cc
$LIB_DIR/ffmpeg.protos/src/utils.cc
$LIB_DIR/ffmpeg.protos/src/proto/movMemProto.cc
......@@ -98,18 +140,22 @@ FLAGS=(
-s INVOKE_RUN=0 # not to run the main() in the beginning
-s EXIT_RUNTIME=1 # exit runtime after execution
-s MODULARIZE=1 # lazy loading; use modularized version to be more flexible
-s EXPORT_NAME="createParaEncoder" # assign export name for browser
-s EXPORT_NAME="createMuxer" # assign export name for browser
-s EXPORTED_FUNCTIONS="[_main,_malloc,_free]" # export main and proxy_main funcs
-s EXPORTED_RUNTIME_METHODS="[FS, cwrap, ccall, setValue, writeAsciiToMemory, getValue]" # export preamble funcs
-s INITIAL_MEMORY=536870912 # 64 KB * 1024 * 16 * 2047 = 2146435072 bytes ~= 2 GB, 268435456 =256M, 134,217,728 =128M
-s SAFE_HEAP=1
--pre-js $WORKPATH/pre.js
--post-js $WORKPATH/post.js
-gseparate-dwarf=./muxer.debug.wasm -s SEPARATE_DWARF_URL=http://localhost:9876/muxer.debug.wasm
$OPTIM_FLAGS
${EXTRA_FLAGS[@]}
${EXTRA_FLAGS_MUXER[@]}
)
emcc "${FLAGS[@]}"
emcc "${FLAGS_ENCODER[@]}"
emcc "${FLAGS_MUXER[@]}"
chown "$1:$2" ${DEMO_PATH}/resources/workers/paraencoder.js
chown "$1:$2" ${DEMO_PATH}/resources/workers/paraencoder.js
chown "$1:$2" ${DEMO_PATH}/resources/workers/paraencoder.wasm
chown "$1:$2" ${DEMO_PATH}/resources/workers/paramuxer.wasm
chown "$1:$2" ${DEMO_PATH}/resources/workers/paramuxer.wasm