Feat: blive danmaku analyze.
remiliacn committed Aug 19, 2024
1 parent 79203ac commit 6122870
Showing 5 changed files with 86 additions and 11 deletions.
13 changes: 8 additions & 5 deletions Services/live_notification.py
@@ -33,6 +33,7 @@ class LivestreamDanmakuData:
gift_total_price: float = 0
new_captains: int = 0
top_crazy_timestamps: List[str] = dataclasses.field(default_factory=list)
danmaku_analyze_graph: str = ''


class DynamicNotificationData:
@@ -228,18 +229,20 @@ def stringify_danmaku_data(data: LivestreamDanmakuData) -> Message:

new_captains_prompt = f'新舰长{data.new_captains}\n' if data.new_captains >= 3 else ''
gift_price_string = f'(预估收入:¥{data.gift_total_price:.2f}\n' if data.gift_total_price > 0 else ''
hotspot_data_prompt = (f'前{len(data.top_crazy_timestamps)}弹幕最多的精彩时间:'
f'\n{", ".join(data.top_crazy_timestamps)}') \
if data.top_crazy_timestamps else ''
# hotspot_data_prompt = (f'前{len(data.top_crazy_timestamps)}弹幕最多的精彩时间:'
# f'\n{", ".join(data.top_crazy_timestamps)}') \
# if data.top_crazy_timestamps else ''
danmaku_graph_data = MessageSegment.image(data.danmaku_analyze_graph) if data.danmaku_analyze_graph else ''
return construct_message_chain(
'直播已结束!撒花~✿✿ヽ(°▽°)ノ✿\n',
f'一共收到啦{data.danmaku_count}枚弹幕\n',
new_captains_prompt,
f'收到礼物(包括SC){data.gift_received_count}\n',
f'{gift_price_string}',
f'最高人气排名:{data.highest_rank}\n',
f'{hotspot_data_prompt}\n\n',
MessageSegment.image(path))
# f'{hotspot_data_prompt}\n\n',
MessageSegment.image(path),
danmaku_graph_data)

def _delete_dumped_live_data(self, uid):
self.live_database.execute(
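For context, a minimal sketch (hypothetical values, not part of the commit) of how the new field is consumed in stringify_danmaku_data above: the graph is attached as an image segment only when a file was produced, and the '' fallback is later dropped by construct_message_chain (see util/helper_util.py below).

from nonebot.adapters.onebot.v11 import MessageSegment

from util.helper_util import construct_message_chain

graph_path = '/data/live/1724000000_danmaku.png'  # hypothetical; the real value is data.danmaku_analyze_graph
danmaku_graph_data = MessageSegment.image(graph_path) if graph_path else ''
report = construct_message_chain('直播已结束!撒花~✿✿ヽ(°▽°)ノ✿\n', danmaku_graph_data)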
6 changes: 4 additions & 2 deletions Services/twitch_service.py
@@ -12,6 +12,7 @@
from typing import Union, List

from loguru import logger
from nonebot.adapters.onebot.v11 import MessageSegment
from nonebot.internal.matcher import Matcher
from twitchdl import twitch
from youtube_dl.utils import sanitize_filename
@@ -20,6 +21,7 @@
from Services.util.common_util import OptionalDict, HttpxHelperClient, Status, TwitchDownloadStatus, \
ValidatedTimestampStatus
from config import SUPER_USER, PATH_TO_ONEDRIVE, SHARE_LINK, CLOUD_STORAGE_SIZE_LIMIT_GB
from util.helper_util import construct_message_chain


class TwitchLiveData:
@@ -328,8 +330,8 @@ async def _check_space_used():
try:
file_size = path.getsize(file_path)
if file_size > size_limit_bytes:
return Status(False, f'Someone tell [CQ:at,qq={SUPER_USER}]'
f' there is not enough space in the disk.')
return Status(False, construct_message_chain(f'Someone tell ', MessageSegment.at(SUPER_USER),
f' there is not enough space in the disk.'))
except OSError as err:
return Status(False,
f'Someone tell [CQ:at,qq={SUPER_USER}] '
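A minimal sketch of the replacement above (assumed usage, not from the commit): the raw [CQ:at,qq=...] text is swapped for a MessageSegment.at, and Status now carries a Message built by construct_message_chain instead of a plain string.

from nonebot.adapters.onebot.v11 import MessageSegment

from util.helper_util import construct_message_chain

SUPER_USER = 123456789  # hypothetical QQ id; the real value comes from config
warning = construct_message_chain(
    'Someone tell ', MessageSegment.at(SUPER_USER), ' there is not enough space in the disk.')
# Status(False, warning) then propagates the mention as a proper message segment.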
2 changes: 1 addition & 1 deletion Services/util/common_util.py
@@ -124,7 +124,7 @@ async def time_to_literal(time_string: int) -> str:
return result


def construct_timestamp_string(seconds: float) -> str:
def construct_timestamp_string(seconds: float, _pos=None) -> str:
seconds = ceil(seconds)

hours = seconds // 3600
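The added _pos parameter matches matplotlib's FuncFormatter calling convention, formatter(value, pos), so construct_timestamp_string can be used directly as a tick formatter. A sketch of that usage (assuming the axis values are seconds):

from matplotlib import pyplot as plt, ticker

from Services.util.common_util import construct_timestamp_string

ax = plt.gca()
# matplotlib calls construct_timestamp_string(value, pos) for every major tick.
ax.xaxis.set_major_formatter(ticker.FuncFormatter(construct_timestamp_string))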
73 changes: 71 additions & 2 deletions blive_danmaku_report_generator.py
@@ -8,11 +8,12 @@
import sys
import time
from functools import lru_cache
from os import getpid
from os import getpid, getcwd
from random import randint
from typing import Optional, Set, List, Dict
from typing import Optional, Set, List, Dict, Tuple

import aiohttp
from matplotlib import font_manager, rc, rcParams, ticker
from nonebot.log import logger

import blivedm.models.web as web_models
@@ -27,6 +28,16 @@
# Make it random because it is funny lol
TOP_TIMESTAMP_LIMIT = randint(5, 7)

font_path = f'{getcwd()}/Services/util/SourceHanSansSC-Bold.otf'
font_manager.fontManager.addfont(font_path)
prop = font_manager.FontProperties(fname=font_path)

rc('font', family='sans-serif')
rcParams.update({
'font.size': 12,
'font.sans-serif': prop.get_name()
})


def _get_log_filename() -> str:
return f'log_{int(time.time())}_{getpid()}.log'
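
# Note (not part of the commit): addfont() registers the bundled Source Han Sans face
# with matplotlib's font manager, and prop.get_name() yields the family name that
# rcParams['font.sans-serif'] points at, so CJK labels such as '弹幕量' render instead
# of falling back to missing-glyph boxes. A hypothetical sanity check:
# assert font_manager.findfont(prop).endswith('SourceHanSansSC-Bold.otf')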
@@ -147,6 +158,14 @@ def _on_heartbeat(self, client: ws_base.WebSocketClientBase, message: web_models
logger.error(f'Failed to get hotspot data: {err.__class__}')
hotspot_timestamp_data = []

try:
danmaku_graph_hotspot = _get_sorted_hotspot_time_to_frequency(self.stream_hotspot_timestamp_list)
logger.info(f'Danmaku graph hotspot list: {danmaku_graph_hotspot}')
file_name = _draw_danmaku_frequency_graph(danmaku_graph_hotspot)
except Exception as err:
logger.error(f'Failed to get danmaku graph data: {err.__class__}')
file_name = ''

pickled_data = codecs.encode(pickle.dumps(LivestreamDanmakuData(
danmaku_count=self.danmaku_count,
danmaku_frequency_dict=self.danmaku_frequency_dict,
Expand All @@ -157,6 +176,7 @@ def _on_heartbeat(self, client: ws_base.WebSocketClientBase, message: web_models
gift_total_price=self.gift_price if live_notification.is_fetch_gift_price(self.room_id) else 0,
new_captains=self.new_captains,
top_crazy_timestamps=hotspot_timestamp_data,
danmaku_analyze_graph=file_name
)), 'base64').decode()
live_notification.dump_live_data(pickled_data)
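
# A sketch (not part of the commit) of the matching read side, assumed to live in
# live_notification.py: it reverses the base64 + pickle encoding used just above.
import codecs
import pickle

def load_live_data(pickled_data: str) -> LivestreamDanmakuData:
    return pickle.loads(codecs.decode(pickled_data.encode(), 'base64'))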

@@ -230,6 +250,55 @@ def hotspot_analyzation(timestamps: List[float], intervals=60) -> Dict[float, fl
return result_dict


def seconds_to_hms(x, pos):
hours, remainder = divmod(x, 3600)
minutes, seconds = divmod(remainder, 60)
return f'{int(hours):02}:{int(minutes):02}:{int(seconds):02}'
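
# Quick sanity checks for the formatter above (a sketch, not part of the commit);
# matplotlib's FuncFormatter passes (value, pos) and pos is unused here:
assert seconds_to_hms(3723, None) == '01:02:03'
assert seconds_to_hms(59.0, None) == '00:00:59'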


def _get_sorted_hotspot_time_to_frequency(stream_time_frequency_list: List[float]) -> Tuple[List[float], List[float]]:
hotspot_analyzation_result = hotspot_analyzation(stream_time_frequency_list, 60)
sorted_result = sorted(hotspot_analyzation_result.items(), key=lambda x: x[0])

x_axis_data = [x[0] for x in sorted_result]
y_axis_data = [x[1] for x in sorted_result]

return x_axis_data, y_axis_data


def _draw_danmaku_frequency_graph(data_tuple: Tuple[List[float], List[float]]) -> str:
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter1d

plt.rcParams['axes.unicode_minus'] = False

x_axis_data, y_axis_data = data_tuple

smoothed_y_axis_data = gaussian_filter1d(y_axis_data, sigma=2)

logger.info(f'X axis data: {x_axis_data}')
logger.info(f'Y axis data: {y_axis_data}')

plt.margins(x=0, y=0)
# noinspection PyTypeChecker
plt.plot(x_axis_data, smoothed_y_axis_data)

plt.gca().xaxis.set_major_formatter(ticker.FuncFormatter(seconds_to_hms))
plt.gcf().autofmt_xdate()

plt.xlabel('直播时间(误差+-1分钟)')
plt.ylabel('弹幕量')
plt.title('弹幕活跃趋势')

# noinspection PyTypeChecker
plt.fill_between(x_axis_data, 0, smoothed_y_axis_data, alpha=0.5, color='green')

file_name = f'{getcwd()}/data/live/{int(time.time())}_danmaku.png'
plt.savefig(file_name)

return file_name
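
# A sketch of the end-to-end flow with hypothetical data (not part of the commit):
# timestamps are assumed to be seconds relative to stream start; they are bucketed
# per minute by hotspot_analyzation, smoothed with a Gaussian filter, and saved as a PNG.
example_timestamps = [12.0, 15.5, 70.2, 71.0, 75.9, 140.3, 141.1]
x_data, y_data = _get_sorted_hotspot_time_to_frequency(example_timestamps)
graph_file = _draw_danmaku_frequency_graph((x_data, y_data))  # path later stored in danmaku_analyze_graph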


def get_sorted_timestamp_hotspot(stream_time_frequency_list: List[float], intervals=60) -> List[str]:
hotspot_analyzation_result = hotspot_analyzation(stream_time_frequency_list, intervals)
sorted_result = sorted(hotspot_analyzation_result.items(), key=lambda x: x[1], reverse=True)
3 changes: 2 additions & 1 deletion util/helper_util.py
@@ -63,7 +63,8 @@ def construct_message_chain(*args: [str, MessageSegment, Message, List[MessageSe
continue

if isinstance(arg, str):
message_list.append(MessageSegment.text(arg))
if arg:
message_list.append(MessageSegment.text(arg))
elif isinstance(arg, Message):
message_list += [x for x in arg]
elif isinstance(arg, list):
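With this change, empty strings passed to construct_message_chain no longer become empty text segments, which is what lets callers pass optional parts (the gift price line, the danmaku graph fallback) unconditionally. A small sketch, not from the commit:

from nonebot.adapters.onebot.v11 import MessageSegment

from util.helper_util import construct_message_chain

msg = construct_message_chain('弹幕报告\n', '', MessageSegment.text('完毕'))
# The '' placeholder (e.g. an optional prompt that was not generated) is simply skipped,
# so no empty text segment ends up in the outgoing message.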
