"""Scrape danmu (bullet comments), compute word statistics, and render a word cloud.

Pipeline: Scawler.work() fetches raw danmu strings -> analyze_danmu_statistics()
builds a frequency counter -> Cloud_shower.to_show() renders the word cloud.
"""

# Standard library (cProfile/pstats/StringIO are used by the optional
# profiling section inside main()).
import cProfile
import pstats
from io import StringIO

# Project-local modules.
import tool.csv_parse as cp
import tool.keywords as keywords
import tool.word_filter as static
from tool.cloud_show import Cloud_shower
from tool.scrawl import Scawler
from tool.static import analyze_danmu_statistics


def main():
    """Run the scrape -> analyze -> visualize pipeline for pages 1..40."""
    # --- Optional: profile the scraper (uncomment to measure hot spots) ---
    # profiler = cProfile.Profile()
    # profiler.enable()
    # scawler = Scawler()
    # ls = scawler.work(1, 4)
    # danmu_counter = analyze_danmu_statistics(ls)
    # shower = Cloud_shower()
    # shower.to_show(danmu_counter)
    # profiler.disable()
    # stream = StringIO()
    # stats = pstats.Stats(profiler, stream=stream)
    # stats.sort_stats('cumulative')  # sort by cumulative time
    # stats.print_stats(20)           # show the top 20 entries
    # print("\n=== Detailed profiling report ===")
    # print(stream.getvalue())

    # --- Data scraping ---
    scawler = Scawler()
    ls = scawler.work(1, 40)

    # --- Optional: cache raw danmu to disk so re-runs skip the scrape ---
    # with open('./raw_danmu.txt', 'w', encoding='utf-8') as f:
    #     for danmu in ls:
    #         f.write(danmu + '\n')
    #
    # --- Optional: load cached danmu instead of scraping ---
    # with open('./raw_danmu.txt', 'r', encoding='utf-8') as f:
    #     ls = [line.strip() for line in f if line.strip()]

    # --- Analysis and visualization ---
    danmu_counter = analyze_danmu_statistics(ls)
    shower = Cloud_shower()
    shower.to_show(danmu_counter)


if __name__ == "__main__":
    main()