Hi!
So I’m trying to build a report view from data in multiple Snort log files, which can contain 100K+ records each. The problem is that this task takes too long. For a single file, I take information from each line, assign it to a dictionary, and then to an OrderedDict. In my view I can access the data in DataTable and ECharts. What would be the best approach to optimizing this process? I’ve heard about asynchronous support in Django 3, but it’s a mystery to me.
Here is my view function:
@login_required
def generate_report_fast(request):
    """Render a Snort alert report built from the current user's uploaded log files.

    Each line of every uploaded file is parsed by ``read_data`` and stored in
    ``dict_alert`` under a running 1-based counter that is global across all
    files.  Aggregate counts (protocol, IP, classification, priority, time)
    are then computed over the combined dict and passed to the template.

    Returns the rendered ``report.html`` on success, or ``details_error.html``
    when the user has no uploaded files.
    """
    files = UploadFile.objects.filter(user=request.user)
    # BUG FIX: the original built the same queryset twice (once for the
    # truthiness check, once for iteration), hitting the DB twice.  Evaluate
    # it once and use an early-return guard clause instead of nesting.
    if not files.exists():
        return render(request, 'details_error.html')

    dict_alert = OrderedDict()
    count = 1  # global record counter across all files (1-based keys)
    for file in files:
        path = file.upload_file.path
        date = str(file.get_year())  # year used to complete the log timestamps
        with open(path) as alertfile:
            # NOTE: iterating a file object never yields None — the original
            # "if line is None: break" guard was dead code and is removed.
            for line in alertfile:
                dict_alert[count] = read_data(line, date)
                count += 1

    # Aggregations for the charts/table in report.html.
    proto_count_data = protocount(dict_alert)
    ip_count_data = ipcount(dict_alert)
    classi_count_data = classicount(dict_alert)
    priority_count_data = prioritycount(dict_alert)
    time_count_data = timecount(dict_alert)
    return render(request, 'report.html', {'pkts': dict_alert, 'proto': proto_count_data, 'ip_data': ip_count_data,
                                           'classi_data': classi_count_data, 'priority_data': priority_count_data,
                                           'time_data': time_count_data})