Commit 31be9296 authored by Marko Kollo

Fixed document count error in Index Management.

parent 1217a8f3
import datetime
import json
import logging
import multiprocessing
import os
import shutil

from django.contrib.auth.decorators import login_required, user_passes_test
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.template import loader
from django.urls import reverse

from permission_admin.models import Dataset, ScriptProject
from permission_admin.script_runner import ScriptRunner
from task_manager.models import Task
from task_manager.tasks.task_types import TaskTypes
from texta.settings import ERROR_LOGGER, INFO_LOGGER, SCRIPT_MANAGER_DIR, STATIC_URL, URL_PREFIX
from utils.datasets import Datasets
from utils.es_manager import ES_Manager
@login_required
@user_passes_test(lambda u: u.is_superuser)
@@ -182,12 +175,14 @@ def get_datasets(indices=None):
    datasets_out = []
    for dataset in datasets:
        ds_out = dataset.__dict__
        if indices:
            for index in indices:
                if index['index'] == ds_out['index']:
                    ds_out['status'] = index['status']
                    # Previously: ds_out['docs_count'] = index['docs_count']
                    ds_out['docs_count'] = ES_Manager.single_index_count(index["index"])  # Passed value from indices is wrong.
                    ds_out['store_size'] = index['store_size']
                elif '*' in ds_out['index']:
                    ds_out['status'] = 'open'
                    ds_out['docs_count'] = 'multiindex'
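The `status`, `docs_count` and `store_size` keys carried in `indices` match the columns of Elasticsearch's `_cat/indices` API, which is presumably where that listing is built. The `docs.count` column of that API counts every Lucene document, nested documents included, so it can overstate the number of top-level documents; the `_count` API (which the new `single_index_count` helper effectively uses) does not. A minimal sketch of the discrepancy, with a placeholder URL and index name:

import requests

ES_URL = "http://localhost:9200"   # stand-in for texta.settings.es_url
INDEX = "my_index"                 # hypothetical index name

# docs.count from the cat API counts every Lucene document, nested ones included.
cat_info = requests.get(ES_URL + "/_cat/indices/" + INDEX + "?format=json").json()[0]
print("cat docs.count:", cat_info["docs.count"])

# The count API returns only top-level documents, which is what the
# Index Management page should display.
count = requests.get(ES_URL + "/" + INDEX + "/_count").json()["count"]
print("_count:", count)
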
@@ -365,7 +360,7 @@ def _unpickle_method(func_name, obj, cls):
    return func.__get__(obj, cls)

import copyreg
import types

copyreg.pickle(types.MethodType, _pickle_method, _unpickle_method)
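This `copyreg.pickle` registration is the standard recipe for letting bound methods cross the multiprocessing pickling boundary; only the unpickling half is visible in this hunk. A sketch of the matching `_pickle_method` half, assuming the usual form of the recipe rather than this repository's exact code:

def _pickle_method(method):
    # Reduce a bound method to (function name, instance, class) so copyreg
    # can pickle it; _unpickle_method above re-binds it on the worker side.
    func_name = method.__func__.__name__
    obj = method.__self__
    cls = obj.__class__
    return _unpickle_method, (func_name, obj, cls)
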
# -*- coding: utf8 -*-
import copy
import datetime
import json
import logging
from functools import reduce
from typing import Dict, List

import elasticsearch
import requests
from elasticsearch import Elasticsearch, ElasticsearchException
from elasticsearch_dsl import A, Search
from elasticsearch_dsl.query import MoreLikeThis, Q

from permission_admin.models import Dataset
from texta.settings import ERROR_LOGGER, FACT_FIELD, date_format, es_ldap_password, es_ldap_user, es_prefix, es_url, es_use_ldap
from utils.ds_importer_helper import check_for_analyzer
from utils.generic_helpers import find_key_recursivly
from utils.query_builder import QueryBuilder
# Note: index.max_inner_result_window may need to be increased to return more inner hits per query.
HEADERS = {'Content-Type': 'application/json'}
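`index.max_inner_result_window` is a dynamic index setting (default 100) that caps how many inner hits or top_hits results can be requested per query, which is what the note above refers to. A sketch of raising it in the same requests/HEADERS style this module uses, with a placeholder URL, index name and limit:

import json
import requests

HEADERS = {'Content-Type': 'application/json'}
ES_URL = "http://localhost:9200"   # stand-in for texta.settings.es_url

settings_body = {"index": {"max_inner_result_window": 1000}}
response = requests.put(ES_URL + "/my_index/_settings", data=json.dumps(settings_body), headers=HEADERS)
print(response.json())   # {"acknowledged": true} on success
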
@@ -687,6 +683,11 @@ class ES_Manager:
        response = ES_Manager.requests.get(url=endpoint_url, json=query).json()
        return response['count']

    @staticmethod
    def single_index_count(index_name: str) -> int:
        count = Search(using=Elasticsearch(es_url), index=index_name).count()
        return count
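    # get_datasets() in permission_admin/views.py calls this helper instead of
    # trusting the docs_count value carried in its `indices` argument. The
    # Search(...).count() call above goes through the _count API and therefore
    # returns only top-level documents. Note that the client is created as
    # Elasticsearch(es_url) with no credentials, so this presumably assumes
    # es_use_ldap is disabled.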
    def get_field_mappings(self) -> dict:
        """
        Uses the _mapping endpoint to fetch the mapping data of ALL the fields in
......
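For reference, the _mapping endpoint that the truncated docstring mentions can be queried directly; a sketch with a placeholder URL and index name (older Elasticsearch versions nest an extra doc-type level under "mappings"):

import requests

ES_URL = "http://localhost:9200"   # stand-in for texta.settings.es_url

# GET /<index>/_mapping returns {index_name: {"mappings": ...}}; on ES 7+ the
# field definitions sit under "mappings" -> "properties".
mappings = requests.get(ES_URL + "/my_index/_mapping").json()
for index_name, index_mapping in mappings.items():
    properties = index_mapping["mappings"].get("properties", {})
    print(index_name, list(properties.keys()))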