Compare commits

2 Commits

@@ -36,6 +36,9 @@ bool doesFileExist(string filePath),
 #define DEFAULT_THREAD_ID 0
 #define MAIN_PRINT(x) THREAD_PRINT(DEFAULT_THREAD_ID, x)
+#define EXIT_WITH_ERROR(x) { PRINT(x); exit(EXIT_FAILURE); }
+#define MAIN_EXIT_WITH_ERROR(x) { MAIN_PRINT(x); exit(EXIT_FAILURE); }
 mutex printMutex,
       channelsAlreadyTreatedAndToTreatMutex,
       quotaMutex;
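
The two macros added here wrap the existing print helpers and terminate the process, so every `PRINT(...)` followed by `exit(EXIT_FAILURE);` pair below collapses into a single call. For illustration only, here is a minimal standalone sketch of how such a print-and-exit macro behaves; `PRINT` is redefined as a simple stderr stand-in, since the real `PRINT`/`THREAD_PRINT` are defined elsewhere in the project:

    #include <cstdlib>
    #include <iostream>

    // Simplified stand-in for the project's PRINT/THREAD_PRINT (assumption:
    // the real macros also take a stream expression and no trailing semicolon).
    #define PRINT(x) { std::cerr << x << std::endl; }
    #define EXIT_WITH_ERROR(x) { PRINT(x); exit(EXIT_FAILURE); }

    int main(int argc, char *argv[])
    {
        if(argc < 2)
        {
            // Prints the message, then terminates the whole process with EXIT_FAILURE.
            EXIT_WITH_ERROR("Missing argument, got only " << argc - 1 << " argument(s)!")
        }
        std::cout << "Treating " << argv[1] << std::endl;
        return EXIT_SUCCESS;
    }

Because the expansion is a plain brace block rather than a `do { ... } while(0)` wrapper, the macro is used as a standalone statement without a trailing semicolon, which matches the call sites in the hunks below.
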
@@ -84,14 +87,12 @@ int main(int argc, char *argv[])
            }
            else
            {
-               MAIN_PRINT("YouTube operational API instance URL missing!")
-               exit(EXIT_FAILURE);
+               MAIN_EXIT_WITH_ERROR("YouTube operational API instance URL missing!")
            }
        }
        else
        {
-           MAIN_PRINT("Unrecognized parameter " << argvStr)
-           exit(EXIT_FAILURE);
+           MAIN_EXIT_WITH_ERROR("Unrecognized parameter " << argvStr)
        }
    }
@@ -273,8 +274,7 @@ void treatChannelOrVideo(unsigned short threadId, bool isChannel, string id, str
        json data = getJson(threadId, "playlistItems?part=snippet,contentDetails,status&playlistId=" + playlistToTreat + "&maxResults=50&pageToken=" + pageToken, true, channelToTreat, returnErrorIfPlaylistNotFound);
        if(data.contains("error"))
        {
-           PRINT("Not listing comments on videos, as `playlistItems` hasn't found the `uploads` playlist!")
-           exit(EXIT_FAILURE);
+           EXIT_WITH_ERROR("Not listing comments on videos, as `playlistItems` hasn't found the `uploads` playlist!")
        }
        json items = data["items"];
        for(const auto& item : items)
@@ -302,8 +302,7 @@ void treatChannelOrVideo(unsigned short threadId, bool isChannel, string id, str
            }
            else //if(videoCount >= 20000)
            {
-               PRINT("The videos count of the channel exceeds the supported 20,000 limit!")
-               exit(EXIT_FAILURE);
+               EXIT_WITH_ERROR("The videos count of the channel exceeds the supported 20,000 limit!")
            }
        }
    }
@@ -488,11 +487,11 @@ void treatChannelOrVideo(unsigned short threadId, bool isChannel, string id, str
            {
                if(item.contains("liveStreamingDetails"))
                {
-                   PRINT(item["id"])
+                   string videoId = item["id"];
+                   //PRINT(videoId)
                    json liveStreamingDetails = item["liveStreamingDetails"];
                    if(liveStreamingDetails.contains("activeLiveChatId"))
                    {
-                       PRINT("streaming")
                        string activeLiveChatId = liveStreamingDetails["activeLiveChatId"];
                        json data = getJson(threadId, "liveChat/messages?part=snippet,authorDetails&liveChatId=" + activeLiveChatId, true, id),
                             items = data["items"];
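
For an active livestream, this branch asks `liveChat/messages` for the chat and funnels every commenter's channel into the crawl queue. As a minimal illustration of the JSON access pattern used in the next hunk (`item["snippet"]["authorChannelId"]`), here is a self-contained sketch that parses a hand-written sample response with nlohmann::json, which the project is assumed to use given the `json::parse_error` handler further down:

    #include <iostream>
    #include <string>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;
    using namespace std;

    int main()
    {
        // Hand-written sample shaped like a liveChat/messages response; not real API output.
        json data = json::parse(R"({
            "items": [
                {"snippet": {"authorChannelId": "UCxxxxxxxxxxxxxxxxxxxxxx"}},
                {"snippet": {"authorChannelId": "UCyyyyyyyyyyyyyyyyyyyyyy"}}
            ]
        })");
        json items = data["items"];
        for(const auto& item : items)
        {
            // Same access path as in the diff: snippet.authorChannelId.
            string channelId = item["snippet"]["authorChannelId"];
            cout << channelId << endl;
        }
        return 0;
    }
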
@@ -500,12 +499,47 @@ void treatChannelOrVideo(unsigned short threadId, bool isChannel, string id, str
                        {
                            string channelId = item["snippet"]["authorChannelId"];
                            addChannelToTreat(threadId, channelId);
-                           PRINT("Found: " << channelId)
                        }
                    }
                    else
                    {
-                       PRINT("no more streaming")
+                       // As there isn't the usual pagination mechanism for these ended livestreams, we proceed in an uncertain way as follows.
+                       set<string> messageIds;
+                       unsigned long long lastMessageTimestampRelativeMsec = 0;
+                       while(true)
+                       {
+                           string time = to_string(lastMessageTimestampRelativeMsec);
+                           json data = getJson(threadId, "liveChats?part=snippet&id=" + videoId + "&time=" + time, false, id),
+                                snippet = data["items"][0]["snippet"];
+                           if(snippet.empty())
+                           {
+                               break;
+                           }
+                           json firstMessage = snippet[0];
+                           string firstMessageId = firstMessage["id"];
+                           // We verify that we don't skip any message by verifying that the first message was already treated if we already treated some messages.
+                           if(!messageIds.empty() && messageIds.find(firstMessageId) == messageIds.end())
+                           {
+                               EXIT_WITH_ERROR("The verification that we don't skip any message failed!")
+                           }
+                           for(const auto& message : snippet)
+                           {
+                               string messageId = message["id"];
+                               if(messageIds.find(messageId) == messageIds.end())
+                               {
+                                   messageIds.insert(messageId);
+                                   string channelId = message["authorChannelId"];
+                                   addChannelToTreat(threadId, channelId);
+                               }
+                           }
+                           json lastMessage = snippet.back();
+                           // If there isn't any new message, then we stop the retrieving.
+                           if(lastMessageTimestampRelativeMsec == lastMessage["videoOffsetTimeMsec"])
+                           {
+                               break;
+                           }
+                           lastMessageTimestampRelativeMsec = lastMessage["videoOffsetTimeMsec"];
+                       }
                    }
                }
            }
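
The new branch handles ended livestreams: since the `liveChats` endpoint used here lacks the usual pagination mechanism (per the comment in the diff), the loop repeatedly re-queries from the last seen `videoOffsetTimeMsec`, deduplicates messages by id, checks that consecutive pages overlap (so nothing was skipped), and stops once a page brings no new message. The following standalone sketch isolates that loop; `fetchChatSnippet` is a hypothetical stand-in for the real `getJson` call and simply serves a small hand-made message list:

    #include <cstdlib>
    #include <iostream>
    #include <set>
    #include <string>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;
    using namespace std;

    // Hypothetical stand-in for getJson(threadId, "liveChats?...&time=" + time, false, id):
    // given an offset in milliseconds, returns the messages at or after that offset,
    // so consecutive pages overlap on the last message of the previous page.
    json fetchChatSnippet(unsigned long long timeMsec)
    {
        static const json allMessages = json::parse(R"([
            {"id": "m0", "authorChannelId": "UCaaa", "videoOffsetTimeMsec": 0},
            {"id": "m1", "authorChannelId": "UCbbb", "videoOffsetTimeMsec": 1500},
            {"id": "m2", "authorChannelId": "UCccc", "videoOffsetTimeMsec": 3000}
        ])");
        json snippet = json::array();
        for(const auto& message : allMessages)
        {
            if(message["videoOffsetTimeMsec"].get<unsigned long long>() >= timeMsec)
            {
                snippet.push_back(message);
            }
        }
        return snippet;
    }

    int main()
    {
        set<string> messageIds;
        unsigned long long lastMessageTimestampRelativeMsec = 0;
        while(true)
        {
            json snippet = fetchChatSnippet(lastMessageTimestampRelativeMsec);
            if(snippet.empty())
            {
                break;
            }
            // Once at least one page was treated, the first message of the next
            // page must already be known, otherwise messages were skipped.
            string firstMessageId = snippet[0]["id"];
            if(!messageIds.empty() && messageIds.find(firstMessageId) == messageIds.end())
            {
                cerr << "The verification that we don't skip any message failed!" << endl;
                return EXIT_FAILURE;
            }
            for(const auto& message : snippet)
            {
                string messageId = message["id"];
                // Only treat each message once despite the overlap between pages.
                if(messageIds.insert(messageId).second)
                {
                    cout << "Author: " << message["authorChannelId"].get<string>() << endl;
                }
            }
            unsigned long long lastOffset = snippet.back()["videoOffsetTimeMsec"];
            // No new message since the previous page: stop retrieving.
            if(lastMessageTimestampRelativeMsec == lastOffset)
            {
                break;
            }
            lastMessageTimestampRelativeMsec = lastOffset;
        }
        return EXIT_SUCCESS;
    }
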
@@ -662,8 +696,7 @@ json getJson(unsigned short threadId, string url, bool usingYoutubeDataApiv3, st
    }
    catch (json::parse_error& ex)
    {
-       PRINT("Parse error for " << finalUrl << ", as got: " << content << " !")
-       exit(EXIT_FAILURE);
+       EXIT_WITH_ERROR("Parse error for " << finalUrl << ", as got: " << content << " !")
    }
    if(data.contains("error"))
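
The last hunk applies the same macro to the JSON parse failure path of `getJson`. Assuming the project uses nlohmann::json (which the `json::parse_error` exception type suggests), a minimal standalone sketch of that try/catch looks like this; the URL and response body are hard-coded placeholders rather than anything fetched by the project:

    #include <cstdlib>
    #include <iostream>
    #include <string>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;
    using namespace std;

    int main()
    {
        string finalUrl = "https://example.com/api",  // hypothetical URL
               content = "<html>not JSON</html>";     // malformed body standing in for the HTTP response
        json data;
        try
        {
            data = json::parse(content);
        }
        catch (json::parse_error& ex)
        {
            // Mirrors EXIT_WITH_ERROR: report which URL produced the bad body, then abort.
            cerr << "Parse error for " << finalUrl << ", as got: " << content << " !" << endl;
            exit(EXIT_FAILURE);
        }
        cout << data.dump() << endl;
        return EXIT_SUCCESS;
    }
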