Skip to content

Commit

Permalink
Integrate metrics light (#7)
Browse files Browse the repository at this point in the history
* Setup libraries and the metrics provider class

* Verify the server pid on shutdown

* Set to send the report on shutdown if possible, add support for tags

* Add the crashpad library and setup a class for it

* Revert "Set to send the report on shutdown if possible, add support for tags"

This reverts commit a532bba.

* Revert "Revert "Set to send the report on shutdown if possible, add support for tags""

This reverts commit e755c9d.

* Revert "Add the crashpad library and setup a class for it"

This reverts commit 297c80b.

* Fix layout style

* Make the reports go to the dedicated Sentry project

* Allows exiting the crashhandler if the client wasn't able to connect

* Set the initial status in case SLOBS crashes before connecting

* Rename the pipe used for metrics

* Handle blaming directly the server or the frontend

* Adjust the initial status when initializing the metrics provider

* Fix layout issues

* Do not update the metrics status if the last one was the "handled crash" one

* Handle blaming from the client side

* Fix blaming the frontend instead of the backend when the server crashes on shutdown

* Changes the SDK used to properly reflect our company, add the release tag

* Fix layout issues
  • Loading branch information
RodrigoHolztrattner authored and eddyStreamlabs committed May 8, 2019
1 parent f46e00d commit de7b34b
Show file tree
Hide file tree
Showing 4 changed files with 602 additions and 0 deletions.
44 changes: 44 additions & 0 deletions crash-handler-process/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,22 +1,66 @@
CMAKE_MINIMUM_REQUIRED(VERSION 3.1)
PROJECT(crash-handler-process VERSION 0.0.1)

# FetchContent downloads third-party dependencies at configure time.
include(FetchContent)

# Nlohmann JSON (modern JSON for C++) — consumed header-only via its
# single_include directory (see target_include_directories below).
# NOTE(review): no GIT_TAG is pinned, so configure fetches the default branch
# head and builds are not reproducible — consider pinning a release tag.
FetchContent_Declare(
nlohmannjson
GIT_REPOSITORY https://github.com/nlohmann/json
)

# Link against a static libcurl; must be defined before cpr/curl configure.
add_compile_definitions(CURL_STATICLIB)

# cpr ("C++ Requests: Curl for People") — HTTP client wrapper over libcurl,
# used to send the metrics/crash reports.
# NOTE(review): also unpinned — same reproducibility concern as above.
FetchContent_Declare(
cpr
GIT_REPOSITORY https://github.com/whoshuu/cpr/
)

# Download nlohmann/json only (no add_subdirectory needed: header-only use).
FetchContent_GetProperties(nlohmannjson)
if(NOT nlohmannjson_POPULATED)
FetchContent_Populate(nlohmannjson)
endif()

# Configure the cpr/curl builds before populating cpr:
#  - static curl library with a statically linked CRT, no curl executable
#  - no cpr/curl test targets
#  - TLS via the Windows native stack (WinSSL) instead of OpenSSL
set(BUILD_CURL_EXE false CACHE BOOL "" FORCE)
set(CURL_STATIC_CRT true CACHE BOOL "" FORCE)
set(CURL_STATICLIB true CACHE BOOL "" FORCE)
set(BUILD_CPR_TESTS false CACHE BOOL "" FORCE)
set(BUILD_TESTING false CACHE BOOL "" FORCE)
set(CMAKE_USE_OPENSSL false CACHE BOOL "" FORCE)
set(CMAKE_USE_WINSSL true CACHE BOOL "" FORCE)

# Download cpr and add it (and the libcurl it builds) to this build tree.
FetchContent_GetProperties(cpr)
if(NOT cpr_POPULATED)
FetchContent_Populate(cpr)
add_subdirectory(${cpr_SOURCE_DIR} ${cpr_BINARY_DIR})
endif()

#############################
# Source, Libraries & Directories
#############################
# All translation units (and their headers) for the crash-handler executable.
SET(PROJECT_SOURCE
"${PROJECT_SOURCE_DIR}/process.cpp" "${PROJECT_SOURCE_DIR}/process.hpp"
"${PROJECT_SOURCE_DIR}/metricsprovider.cpp" "${PROJECT_SOURCE_DIR}/metricsprovider.hpp"
"${PROJECT_SOURCE_DIR}/message.cpp" "${PROJECT_SOURCE_DIR}/message.hpp"
"${PROJECT_SOURCE_DIR}/namedsocket-win.cpp" "${PROJECT_SOURCE_DIR}/namedsocket-win.hpp"
"${PROJECT_SOURCE_DIR}/namedsocket.cpp" "${PROJECT_SOURCE_DIR}/namedsocket.hpp"
"${PROJECT_SOURCE_DIR}/main.cpp"
)


#############################
# Building
#############################
ADD_EXECUTABLE(crash-handler-process ${PROJECT_SOURCE})

# Include/link crash manager dependencies:
# nlohmann/json is header-only; cpr and its bundled libcurl link statically.
target_include_directories(crash-handler-process PUBLIC
"${nlohmannjson_SOURCE_DIR}/single_include"
"${cpr_SOURCE_DIR}/include")
target_link_libraries(crash-handler-process cpr)
target_link_libraries(crash-handler-process libcurl)

#############################
# Distribute
#############################
Expand Down
35 changes: 35 additions & 0 deletions crash-handler-process/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@
#include <algorithm>
#include <vector>
#include "namedsocket-win.hpp"
#include "metricsprovider.hpp"

#include <queue>
#include <sstream>
#include <fstream>
#include <codecvt>
Expand All @@ -20,6 +22,7 @@ bool doRestartApp = false;
bool monitoring = false;
bool closeAll = false;
std::mutex* mu = new std::mutex();
MetricsProvider metricsServer;

static thread_local std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
std::string from_utf16_wide_to_utf8(const wchar_t* from, size_t length = -1)
Expand Down Expand Up @@ -217,6 +220,10 @@ void checkProcesses(std::mutex* m) {
criticalProcessAlive = processes.at(i)->getAlive();
}
if (!processes.at(index)->getCritical() && criticalProcessAlive) {

// Metrics
metricsServer.BlameFrontend();

int code = MessageBox(
NULL,
"An error occurred which has caused Streamlabs OBS to close. Don't worry! If you were streaming or recording, that is still happening in the background."
Expand Down Expand Up @@ -250,6 +257,10 @@ void checkProcesses(std::mutex* m) {
closeAll = true;
}
else {

// Metrics
metricsServer.BlameServer();

closeAll = true;
}
*exitApp = true;
Expand Down Expand Up @@ -327,6 +338,13 @@ int main(int argc, char** argv)

std::thread processManager(checkProcesses, mu);

std::thread metricsPipe([&]()
{
metricsServer.Initialize("\\\\.\\pipe\\metrics_pipe");
metricsServer.ConnectToClient();
metricsServer.StartPollingEvent();
});

std::unique_ptr<NamedSocket> sock = NamedSocket::create();

while (!(*exitApp) && !sock->read(&processes, mu, exitApp))
Expand All @@ -337,11 +355,28 @@ int main(int argc, char** argv)
*exitApp = true;
if (processManager.joinable())
processManager.join();

metricsServer.KillPendingIO();
if (metricsPipe.joinable())
metricsPipe.join();
close(closeAll);

if (doRestartApp) {
restartApp(path);
}

// Wait until the server process dies or the metrics provider signals that we can shutdown
while (metricsServer.ServerIsActive() && !metricsServer.ServerExitedSuccessfully())
{
std::this_thread::sleep_for(std::chrono::milliseconds(50));
}

// Only perform the shutdown for the metrics server if it exited successfully
if (metricsServer.ServerExitedSuccessfully())
{
metricsServer.Shutdown();
}

return 0;
}

Expand Down
Loading

0 comments on commit de7b34b

Please sign in to comment.