Merge pull request #33 from markleybros/vidcap-unblocking
Video Capture Unblocking & Gamepad Support
alexmarkley authored Sep 19, 2019
2 parents b3a7a74 + 1eabcde commit 859cf83
Showing 9 changed files with 699 additions and 279 deletions.
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
-0.2.1
+0.2.2
26 changes: 26 additions & 0 deletions data/yer-face-config.json
@@ -135,6 +135,32 @@
"EventLogger": {
},
"SDLDriver": {
"joystick": {
"enabled": true,
"eventsRaw": true,
"controllerSettings": {
"default": {
"buttonEventMapping": {
"basis": 9,
"previewDebugDensity": 8
},
"axisSettings": {
"max": 32767,
"min": 10
}
},
"Nintendo GameCube Controller Adapter": {
"buttonEventMapping": {
"basis": 9,
"previewDebugDensity": 7
},
"axisSettings": {
"max": 22000,
"min": 500
}
}
}
}
},
"PreviewHUD": {
"numWorkersPerCPU": 0.5,
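For illustration only (not part of this diff): a minimal sketch of how a per-controller axisSettings block like the ones above might be applied, assuming SDL-style signed 16-bit raw axis values and treating min as a deadzone and max as the saturation point. The struct and helper names are hypothetical, not YerFace identifiers.

#include <cmath>
#include <cstdint>

// Hypothetical helper: map a raw joystick axis value (-32768..32767 as
// reported by SDL) into -1.0..1.0 using per-controller "axisSettings".
// Magnitudes at or below "min" collapse to 0.0 (deadzone); magnitudes at or
// above "max" clamp to full deflection.
struct AxisSettings {
	int max; // 32767 in the default profile, 22000 for the GameCube adapter
	int min; // 10 in the default profile, 500 for the GameCube adapter
};

double normalizeAxis(int16_t raw, const AxisSettings &settings) {
	double magnitude = std::fabs((double)raw);
	if(magnitude <= (double)settings.min) {
		return 0.0;
	}
	if(magnitude >= (double)settings.max) {
		return raw < 0 ? -1.0 : 1.0;
	}
	double scaled = (magnitude - (double)settings.min) / (double)(settings.max - settings.min);
	return raw < 0 ? -scaled : scaled;
}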
615 changes: 387 additions & 228 deletions src/FFmpegDriver.cpp

Large diffs are not rendered by default.

43 changes: 28 additions & 15 deletions src/FFmpegDriver.hpp
@@ -23,6 +23,7 @@ namespace YerFace {

#define YERFACE_FRAME_DURATION_ESTIMATE_BUFFER 10
#define YERFACE_INITIAL_VIDEO_BACKING_FRAMES 60
#define YERFACE_MAX_PUMPTIME 67 //If a/v stream pumping is taking longer than 1/15th of a second, we may have a hardware problem.

#define YERFACE_AVLOG_LEVELMAP_MIN 0 //Less than this gets dropped.
#define YERFACE_AVLOG_LEVELMAP_ALERT 8 //Less than this (libav* defines 0-7 as PANIC) gets mapped to our LOG_SEVERITY_ALERT
@@ -34,6 +35,7 @@

class FrameServer;
class WorkerPool;
class FFmpegDriver;

enum FFmpegDriverInputAudioChannelMap {
CHANNELMAP_NONE = 0,
@@ -55,7 +57,7 @@ class MediaInputContext {

AVFormatContext *formatContext;

AVPacket packet;
AVPacket *packet;
AVFrame *frame;
FrameNumber frameNumber;

@@ -75,6 +77,12 @@

bool demuxerDraining;

SDL_mutex *demuxerMutex;
SDL_Thread *demuxerThread;
bool demuxerThreadRunning;

FFmpegDriver *driver;

bool initialized;
};

@@ -91,6 +99,11 @@
int audioStreamIndex;

SDL_mutex *multiplexerMutex;
SDL_cond *multiplexerCond;
SDL_Thread *multiplexerThread;
bool multiplexerThreadRunning;

std::list<AVPacket *> outputPackets;

bool initialized;
};
@@ -150,7 +163,7 @@ class FFmpegDriver {
void openInputMedia(string inFile, enum AVMediaType type, string inFormat, string inSize, string inChannels, string inRate, string inCodec, string inputAudioChannelMap, bool tryAudio);
void openOutputMedia(string outFile);
void setVideoCaptureWorkerPool(WorkerPool *workerPool);
void rollDemuxerThread(void);
void rollWorkerThreads(void);
bool getIsAudioInputPresent(void);
bool getIsVideoFrameBufferEmpty(void);
VideoFrame getNextVideoFrame(void);
@@ -163,15 +176,18 @@
void openCodecContext(int *streamIndex, AVCodecContext **decoderContext, AVFormatContext *myFormatContext, enum AVMediaType type);
VideoFrameBacking *getNextAvailableVideoFrameBacking(void);
VideoFrameBacking *allocateNewVideoFrameBacking(void);
bool decodePacket(MediaInputContext *context, int streamIndex, bool drain);
void destroyDemuxerThread(void);
bool decodePacket(MediaInputContext *inputContext, int streamIndex, bool drain);
void destroyDemuxerThread(MediaInputContext *inputContext);
void destroyMuxerThread(void);
static int runOuterDemuxerLoop(void *ptr);
int innerDemuxerLoop(void);
void pumpDemuxer(MediaInputContext *context, enum AVMediaType type);
static int runOuterMuxerLoop(void *ptr);
int innerDemuxerLoop(MediaInputContext *inputContext);
int innerMuxerLoop(void);
void pumpDemuxer(MediaInputContext *inputContext, enum AVMediaType type);
bool flushAudioHandlers(bool draining);
bool getIsAudioDraining(void);
bool getIsVideoDraining(void);
FrameTimestamps resolveFrameTimestamp(MediaInputContext *context, enum AVMediaType type);
FrameTimestamps resolveFrameTimestamp(MediaInputContext *inputContext, enum AVMediaType type);
void recursivelyListAllAVOptions(void *obj, string depth = "-");
bool getIsAllocatedVideoFrameBackingsFull(void);
int64_t applyPTSOffset(int64_t pts, int64_t offset);
@@ -187,17 +203,13 @@

std::list<double> frameStartTimes;

MediaInputContext videoContext, audioContext;
MediaInputContext videoInContext, audioInContext;
MediaOutputContext outputContext;

int width, height;
enum AVPixelFormat pixelFormat, pixelFormatBacking;
struct SwsContext *swsContext;

SDL_Thread *demuxerThread;
SDL_mutex *demuxerThreadMutex;
bool demuxerThreadRunning;

SDL_mutex *videoStreamMutex;
double videoStreamTimeBase;
double newestVideoFrameTimestamp;
@@ -213,11 +225,12 @@
int videoDestBufSize;

SDL_mutex *videoFrameBufferMutex;
list<VideoFrame> readyVideoFrameBuffer;
list<VideoFrameBacking *> allocatedVideoFrameBackings;
std::list<VideoFrame> readyVideoFrameBuffer;
std::list<VideoFrameBacking *> allocatedVideoFrameBackings;

SDL_mutex *audioFrameHandlersMutex;
std::vector<AudioFrameHandler *> audioFrameHandlers;
bool audioCallbacksOkay;
bool audioFrameHandlersOkay;

static Logger *avLogger;
static SDL_mutex *avLoggerMutex;
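The new MediaOutputContext members above (multiplexerMutex, multiplexerCond, multiplexerThread, outputPackets) suggest a producer/consumer design: encoders append AVPacket pointers to a queue and a dedicated muxer thread drains it, so capture never blocks on output I/O. The actual loop lives in the src/FFmpegDriver.cpp diff, which is not rendered above; the following is only a sketch of that general pattern using SDL threading primitives and libavformat's av_interleaved_write_frame(), with hypothetical names.

extern "C" {
#include <libavformat/avformat.h>
}
#include <SDL.h>
#include <list>

// Hypothetical stand-in for the relevant parts of MediaOutputContext.
struct MuxQueue {
	SDL_mutex *mutex;
	SDL_cond *cond;
	bool running;
	std::list<AVPacket *> packets;
	AVFormatContext *outFormat;
};

// Producer side: hand a finished packet to the muxer thread.
void queuePacket(MuxQueue *q, AVPacket *packet) {
	SDL_LockMutex(q->mutex);
	q->packets.push_back(packet);
	SDL_CondSignal(q->cond);
	SDL_UnlockMutex(q->mutex);
}

// Consumer side: runs as an SDL_Thread, draining packets until shut down.
// Whoever clears "running" should do it while holding the mutex and then
// signal the condition so this loop wakes up and exits.
int muxerLoop(void *ptr) {
	MuxQueue *q = (MuxQueue *)ptr;
	SDL_LockMutex(q->mutex);
	while(q->running || !q->packets.empty()) {
		while(q->packets.empty() && q->running) {
			SDL_CondWait(q->cond, q->mutex);
		}
		while(!q->packets.empty()) {
			AVPacket *packet = q->packets.front();
			q->packets.pop_front();
			SDL_UnlockMutex(q->mutex);
			av_interleaved_write_frame(q->outFormat, packet);
			av_packet_free(&packet);
			SDL_LockMutex(q->mutex);
		}
	}
	SDL_UnlockMutex(q->mutex);
	return 0;
}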
120 changes: 98 additions & 22 deletions src/OutputDriver.cpp
@@ -164,9 +164,7 @@ bool OutputFrameContainer::isReady(void) {
OutputDriver::OutputDriver(json config, string myOutputFilename, Status *myStatus, FrameServer *myFrameServer, FaceTracker *myFaceTracker, SDLDriver *mySDLDriver) {
workerPool = NULL;
outputFilename = myOutputFilename;
newestFrameTimestamps.frameNumber = -1;
newestFrameTimestamps.startTimestamp = -1.0;
newestFrameTimestamps.estimatedEndTimestamp = -1.0;
rawEventsPending.clear();
status = myStatus;
if(status == NULL) {
throw invalid_argument("status cannot be NULL");
@@ -190,6 +188,9 @@ OutputDriver::OutputDriver(json config, string myOutputFilename, Status *myStatu
if((workerMutex = SDL_CreateMutex()) == NULL) {
throw runtime_error("Failed creating mutex!");
}
if((rawEventsMutex = SDL_CreateMutex()) == NULL) {
throw runtime_error("Failed creating mutex!");
}

//We need to know when the frame server has drained.
frameServerDrained = false;
@@ -200,24 +201,43 @@ OutputDriver::OutputDriver(json config, string myOutputFilename, Status *myStatu

autoBasisTransmitted = false;
sdlDriver->onBasisFlagEvent([this] (void) -> void {
YerFace_MutexLock(this->workerMutex);
bool eventHandled = false;
if(this->newestFrameTimestamps.frameNumber > 0 && !this->frameServerDrained) {
FrameNumber frameNumber = this->newestFrameTimestamps.frameNumber;
if(this->pendingFrames.find(frameNumber) != this->pendingFrames.end()) {
if(!this->pendingFrames[frameNumber].frameIsDraining) {
if(this->eventLogger != NULL) {
this->eventLogger->logEvent("basis", (json)true, this->newestFrameTimestamps);
}
this->handleNewBasisEvent(frameNumber);
eventHandled = true;
}
}
}
if(!eventHandled) {
this->logger->err("Discarding user basis event because frame status is already drained. (Or similar bad state.)");
// Log the user-generated basis event, but don't try to assign it to a frame because we might not have one in our pipeline right now.
YerFace_MutexLock(this->rawEventsMutex);
OutputRawEvent rawEvent;
rawEvent.eventName = "basis";
rawEvent.payload = (json)true;
this->rawEventsPending.push_back(rawEvent);
YerFace_MutexUnlock(this->rawEventsMutex);
});
sdlDriver->onJoystickButtonEvent([this] (int deviceId, int button, bool pressed, double heldSeconds) -> void {
YerFace_MutexLock(this->rawEventsMutex);
OutputRawEvent rawEvent;
rawEvent.eventName = "controller";
rawEvent.payload = {
{ "deviceId", deviceId },
{ "actionType", "button" },
{ "buttonIndex", button },
{ "buttonPressed", pressed },
{ "heldSeconds", heldSeconds }
};
if(heldSeconds < 0.0) {
rawEvent.payload["heldSeconds"] = nullptr;
}
YerFace_MutexUnlock(this->workerMutex);
this->rawEventsPending.push_back(rawEvent);
YerFace_MutexUnlock(this->rawEventsMutex);
});
sdlDriver->onJoystickAxisEvent([this] (int deviceId, int axis, double value) -> void {
YerFace_MutexLock(this->rawEventsMutex);
OutputRawEvent rawEvent;
rawEvent.eventName = "controller";
rawEvent.payload = {
{ "deviceId", deviceId },
{ "actionType", "axis" },
{ "axisIndex", axis },
{ "axisValue", value }
};
this->rawEventsPending.push_back(rawEvent);
YerFace_MutexUnlock(this->rawEventsMutex);
});
logger = new Logger("OutputDriver");

@@ -261,6 +281,8 @@ OutputDriver::OutputDriver(json config, string myOutputFilename, Status *myStatu
frameStatusChangeCallback.callback = handleFrameStatusChange;
frameStatusChangeCallback.newStatus = FRAME_STATUS_NEW;
frameServer->onFrameStatusChangeEvent(frameStatusChangeCallback);
frameStatusChangeCallback.newStatus = FRAME_STATUS_PREVIEW_DISPLAY;
frameServer->onFrameStatusChangeEvent(frameStatusChangeCallback);
frameStatusChangeCallback.newStatus = FRAME_STATUS_DRAINING;
frameServer->onFrameStatusChangeEvent(frameStatusChangeCallback);
frameStatusChangeCallback.newStatus = FRAME_STATUS_GONE;
@@ -300,6 +322,7 @@ OutputDriver::~OutputDriver() noexcept(false) {
SDL_WaitThread(webSocketServer->serverThread, NULL);
}

SDL_DestroyMutex(rawEventsMutex);
SDL_DestroyMutex(basisMutex);
SDL_DestroyMutex(webSocketServer->websocketMutex);
SDL_DestroyMutex(workerMutex);
@@ -322,7 +345,7 @@ void OutputDriver::setEventLogger(EventLogger *myEventLogger) {
basisEvent.name = "basis";
basisEvent.replayCallback = [this] (string eventName, json eventPayload, json sourcePacket) -> bool {
if(eventName != "basis" || (bool)eventPayload != true) {
this->logger->err("Got an unsupported replay event!");
this->logger->err("Got an unsupported basis replay event!");
return false;
}
this->logger->info("Received replayed Basis Flag event. Rebroadcasting...");
@@ -340,6 +363,19 @@ void OutputDriver::setEventLogger(EventLogger *myEventLogger) {
return true;
};
eventLogger->registerEventType(basisEvent);

EventType controllerEvent;
controllerEvent.name = "controller";
controllerEvent.replayCallback = [this] (string eventName, json eventPayload, json sourcePacket) -> bool {
if(eventName != "controller" || !eventPayload.is_array()) {
this->logger->err("Got an unsupported controller replay event!");
return false;
}
FrameNumber frameNumber = (FrameNumber)sourcePacket["meta"]["frameNumber"];
pendingFrames[frameNumber].frame["controller"] = eventPayload;
return true;
};
eventLogger->registerEventType(controllerEvent);
}

void OutputDriver::handleNewBasisEvent(FrameNumber frameNumber) {
@@ -498,6 +534,9 @@ bool OutputDriver::workerHandler(WorkerPoolWorker *worker) {
void OutputDriver::handleFrameStatusChange(void *userdata, WorkingFrameStatus newStatus, FrameTimestamps frameTimestamps) {
FrameNumber frameNumber = frameTimestamps.frameNumber;
OutputDriver *self = (OutputDriver *)userdata;
OutputRawEvent logEvent;
unordered_map<string, json> eventBuffer;
unordered_map<string, json>::iterator eventBufferIter;
static OutputFrameContainer newOutputFrame;
switch(newStatus) {
default:
@@ -516,9 +555,46 @@ void OutputDriver::handleFrameStatusChange(void *userdata, WorkingFrameStatus ne
newOutputFrame.waitingOn[waitOn] = true;
}
self->pendingFrames[frameNumber] = newOutputFrame;
self->newestFrameTimestamps = frameTimestamps;
YerFace_MutexUnlock(self->workerMutex);
break;
case FRAME_STATUS_PREVIEW_DISPLAY:
// Handle any pending raw events. (Assign them to frames as close as possible to real time.)
eventBuffer.clear();
YerFace_MutexLock(self->rawEventsMutex);
while(self->rawEventsPending.size() > 0) {
OutputRawEvent rawEvent = self->rawEventsPending.front();
self->rawEventsPending.pop_front();

//Accumulate raw events into an array, for cases where multiple of the same event fired during a single frame.
eventBufferIter = eventBuffer.find(rawEvent.eventName);
if(eventBufferIter == eventBuffer.end()) {
eventBuffer[rawEvent.eventName] = json::array();
}

eventBuffer[rawEvent.eventName].push_back(rawEvent.payload);
}
YerFace_MutexUnlock(self->rawEventsMutex);

eventBufferIter = eventBuffer.begin();
while(eventBufferIter != eventBuffer.end()) {
logEvent.eventName = eventBufferIter->first;
logEvent.payload = eventBufferIter->second;

if(logEvent.eventName == "basis") {
self->handleNewBasisEvent(frameNumber);
logEvent.payload = (json)true;
} else {
YerFace_MutexLock(self->workerMutex);
self->pendingFrames[frameNumber].frame[logEvent.eventName] = logEvent.payload;
YerFace_MutexUnlock(self->workerMutex);
}

if(self->eventLogger != NULL) {
self->eventLogger->logEvent(logEvent.eventName, logEvent.payload, frameTimestamps);
}
eventBufferIter++;
}
break;
case FRAME_STATUS_DRAINING:
YerFace_MutexLock(self->workerMutex);
self->logger->debug4("handleFrameStatusChange() Frame #" YERFACE_FRAMENUMBER_FORMAT " waiting on me. Queue depth is now %lu", frameNumber, self->pendingFrames.size());
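In the FRAME_STATUS_PREVIEW_DISPLAY branch above, raw events captured by the SDL callbacks are drained from rawEventsPending and grouped by event name, so several gamepad events that land during one frame become a single JSON array attached to that frame. A standalone sketch of just that accumulation step, assuming the project's json type is nlohmann::json (the json::array() and is_array() calls above are consistent with that):

#include <list>
#include <string>
#include <unordered_map>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

// Mirrors OutputRawEvent from src/OutputDriver.hpp below.
struct RawEvent {
	std::string eventName;
	json payload;
};

// Drain the pending list and group payloads by event name, so multiple events
// of the same name fired during a single frame become one JSON array, e.g.
// {"controller": [ {button press...}, {axis move...} ]}.
std::unordered_map<std::string, json> groupPendingEvents(std::list<RawEvent> &pending) {
	std::unordered_map<std::string, json> buffer;
	while(!pending.empty()) {
		RawEvent event = pending.front();
		pending.pop_front();
		if(buffer.find(event.eventName) == buffer.end()) {
			buffer[event.eventName] = json::array();
		}
		buffer[event.eventName].push_back(event.payload);
	}
	return buffer;
}

// Usage: two controller events queued during one frame collapse into a single
// two-element array under the "controller" key:
//   auto grouped = groupPendingEvents(pending);
//   grouped["controller"].size() == 2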
10 changes: 9 additions & 1 deletion src/OutputDriver.hpp
@@ -32,6 +32,12 @@ class OutputFrameContainer {
json frame;
};

class OutputRawEvent {
public:
string eventName;
json payload;
};

class OutputDriver {
friend class OutputDriverWebSocketServer;

@@ -70,8 +76,10 @@
SDL_mutex *workerMutex;
list<string> lateFrameWaitOn;
unordered_map<FrameNumber, OutputFrameContainer> pendingFrames;
FrameTimestamps newestFrameTimestamps;
bool frameServerDrained;

SDL_mutex *rawEventsMutex;
list<OutputRawEvent> rawEventsPending;
};

}; //namespace YerFace
