diff --git a/apis_core/apis_entities/autocomplete3.py b/apis_core/apis_entities/autocomplete3.py
index 6ad900644..1b5e8c0e0 100644
--- a/apis_core/apis_entities/autocomplete3.py
+++ b/apis_core/apis_entities/autocomplete3.py
@@ -2,12 +2,9 @@
# -*- coding: utf-8 -*-
import json
import operator
-import re
from functools import reduce
import importlib
-import dateutil.parser
-import requests
from dal import autocomplete
from django import http
from django.conf import settings
@@ -15,8 +12,7 @@
from django.core.exceptions import FieldError
from django.db.models import Q
-from apis_core.apis_metainfo.models import Uri, Collection
-from apis_core.utils import caching
+from apis_core.apis_metainfo.models import Collection
from apis_core.utils.caching import get_autocomplete_property_choices
from apis_core.utils.settings import get_entity_settings_by_modelname
@@ -84,308 +80,6 @@ def get_more(self):
return res4
-class GenericEntitiesAutocomplete(autocomplete.Select2ListView):
- @staticmethod
- def parse_stanbol_object(obj, key, *args):
- if len(args) > 0:
- lst1 = args[0]
- else:
- lst1 = None
- if obj[1] == "GNDDate":
- if lst1 is not None:
- try:
- return dateutil.parser.parse(lst1[key][0]["value"])
- except Exception:
- return lst1[key][0]["value"]
- else:
- return obj[0]
- elif obj[1] == "String":
- if lst1 is not None:
- return lst1[key][0]["value"]
- else:
- return obj[0]
- elif obj[1] == "gndLong":
- if lst1 is not None:
- try:
- return re.search(
- "Point \( [+-]([0-9\.]+) [+-]([0-9\.]+)", lst1[key][0]["value"]
- ).group(1)
- except Exception:
- print("extract fails")
- return None
- else:
- print("no match")
-
- def get(self, request, *args, **kwargs):
- """
- Look up entity objects from which to create relationships.
- """
- page_size = 20
- offset = (int(self.request.GET.get("page", 1)) - 1) * page_size
- ac_type = self.kwargs["entity"]
- db_include = self.kwargs.get("db_include", False)
- ent_merge_pk = self.kwargs.get("ent_merge_pk", False)
- choices = []
- headers = {"Content-Type": "application/json"}
- ent_model = caching.get_ontology_class_of_name(ac_type)
- ent_model_name = ent_model.__name__
-
- model_fields = ent_model._meta.get_fields()
- default_search_fields = [
- field.name for field in model_fields if not field.is_relation
- ]
-
- # inspect user search query
- q3 = None
- if self.q.startswith("http"):
- res = ent_model.objects.filter(uri__uri=self.q.strip())
- elif len(self.q) > 0:
- # looks for pattern matching "hello [world]" or "hello [world=1 universe=2]"
- q1 = re.match("([^\[]+)\[([^\]]+)\]$", self.q)
- if q1:
- q = q1.group(1).strip()
- q3 = q1.group(2).split(";")
- q3 = [e.strip() for e in q3]
- else:
- q = re.match("^[^\[]+", self.q).group(0)
- if re.match("^[^*]+\*$", q.strip()):
- search_type = "__istartswith"
- q = re.match("^([^*]+)\*$", q.strip()).group(1)
- elif re.match("^\*[^*]+$", q.strip()):
- search_type = "__iendswith"
- q = re.match("^\*([^*]+)$", q.strip()).group(1)
- elif re.match('^"[^"]+"$', q.strip()):
- search_type = ""
- q = re.match('^"([^"]+)"$', q.strip()).group(1)
- elif re.match("^[^*]+$", q.strip()):
- search_type = "__icontains"
- q = q.strip()
- else:
- search_type = "__icontains"
- q = q.strip()
- arg_list = [
- Q(**{x + search_type: q})
- for x in get_entity_settings_by_modelname(ent_model_name).get(
- "search", default_search_fields
- )
- ]
- res = ent_model.objects.filter(reduce(operator.or_, arg_list)).distinct()
- if q3:
- f_dict2 = {}
- for fd in q3:
- try:
- # try to split query along equal signs
- f_dict2[fd.split("=")[0].strip()] = fd.split("=")[1].strip()
- except IndexError as e:
- print(e)
- try:
- res = res.filter(**f_dict2)
- except Exception as e:
- choices.append({"name": str(e)})
- else:
- q = ""
- res = []
- test_db = True
- test_stanbol = False
- test_stanbol_list = dict()
- more = True
- if not db_include:
- for r in res[offset : offset + page_size]:
- if int(r.pk) == int(ent_merge_pk):
- continue
-
- f = dict()
- dataclass = ""
- f["id"] = Uri.objects.filter(root_object=r)[0].uri
- if hasattr(r, "lng"):
- if r.lng and r.lat:
- dataclass = 'data-vis-tooltip="{}" data-lat="{}" \
- data-long="{}" class="apis-autocomplete-span"'.format(
- ac_type, r.lat, r.lng
- )
- # this is a temporary workaround and can be removed once
- # we use the new custom autocomplete overrides for the relations
- text = str(r)
- dateend = datestart = ""
- if hasattr(r, "end_date") and r.end_date is not None:
- dateend = r.end_date.year
- if hasattr(r, "start_date") and r.start_date is not None:
- datestart = r.start_date.year
- if dateend or datestart:
- text += f" ({datestart} - {dateend})"
- if hasattr(r, "professioncategory"):
- text += f" {r.professioncategory}"
- f["text"] = f"db {text}"
- choices.append(f)
- if len(choices) < page_size:
- test_db = False
- else:
- test_db = False
-
- if ent_model_name in ac_settings.keys():
- for y in ac_settings[ent_model_name]:
- ldpath = ""
- for d in y["fields"].keys():
- ldpath += "{} = <{}>;\n".format(d, y["fields"][d][0])
- if self.q.startswith("http"):
- q = False
- match_url_geo = re.search(
- r"geonames[^0-9]+([0-9]+)", self.q.strip()
- )
- if match_url_geo:
- url = "https://sws.geonames.org/{}/".format(
- match_url_geo.group(1)
- )
- else:
- url = self.q.strip()
- params = {"id": url, "ldpath": ldpath}
- headers = {"Content-Type": "application/json"}
- w = requests.get(
- y["url"].replace("find", "entity"),
- params=params,
- headers=headers,
- )
- res3 = dict()
- ldpath_fields = [y["fields"][d][0] for d in y["fields"].keys()]
- print(w.status_code)
- if w.status_code == 200:
- for x in w.json()["representation"].keys():
- if x in ldpath_fields:
- for d in y["fields"].keys():
- if y["fields"][d][0] == x:
- res3[d] = w.json()["representation"][x]
- res = dict()
- res3["id"] = w.json()["id"]
- res["results"] = [res3]
- else:
- continue
- else:
- data = {
- "limit": page_size,
- "ldpath": ldpath,
- "offset": offset,
- "constraints": [
- {
- "type": "text",
- "patternType": "wildcard",
- "field": "https://www.w3.org/2000/01/rdf-schema#label",
- "text": q.split(),
- }
- ],
- }
- if q3 and "search fields" in y.keys():
- for fd in q3:
- fd = [fd2.strip() for fd2 in fd.split("=")]
- if fd[0] in y["search fields"].keys():
- fd3 = y["search fields"][fd[0]]
- v = False
- if isinstance(fd3[1], dict):
- if fd[1] in fd3[1].keys():
- v = fd3[1][fd[1]]
- else:
- v = fd3[1](fd[1])
- if fd3[2] == "reference" and v:
- fd_4 = {
- "type": "reference",
- "value": v,
- "field": fd3[0],
- }
- data["constraints"].append(fd_4)
- elif fd3[2] == "date_exact" and v:
- fd_4 = {
- "type": "value",
- "value": v,
- "field": fd3[0],
- "datatype": "xsd:dateTime",
- }
- data["constraints"].append(fd_4)
- elif fd3[2] == "date_gt" and v:
- fd_4 = {
- "type": "range",
- "lowerBound": v,
- "upperBound": "2100-12-31T23:59:59.999Z",
- "field": fd3[0],
- "datatype": "xsd:dateTime",
- }
- data["constraints"].append(fd_4)
- elif fd3[2] == "date_lt" and v:
- fd_4 = {
- "type": "range",
- "lowerBound": "1-01-01T23:59:59.999Z",
- "upperBound": v,
- "field": fd3[0],
- "datatype": "xsd:dateTime",
- }
- data["constraints"].append(fd_4)
- else:
- choices.append(
- {"name": "No additional query setting for Stanbol"}
- )
- try:
- url2 = y["url"].replace("find", "query")
- r = requests.post(url2, data=json.dumps(data), headers=headers)
- if r.status_code != 200:
- choices.append({"name": "Connection to Stanbol failed"})
- continue
- res = r.json()
- except Exception as e:
- choices.append({"name": "Connection to Stanbol failed"})
- print(e)
- continue
- if len(res["results"]) < page_size:
- test_stanbol_list[y["url"]] = False
- else:
- test_stanbol_list[y["url"]] = True
- for x in res["results"]:
- f = dict()
- dataclass = ""
- name = x["name"][0]["value"]
- if ac_settings["score"] in x.keys():
- score = str(x[ac_settings["score"]][0]["value"])
- else:
- score = "NA"
- id = x[ac_settings["uri"]]
- score = score
- f["id"] = id
- source = y["source"]
- if "lat" in x.keys() and "long" in x.keys():
- dataclass = 'data-vis-tooltip="{}" \
- data-lat="{}" data-long="{}"'.format(
- ac_type, x["lat"][0]["value"], x["long"][0]["value"]
- )
- if "descr" in x.keys():
- descr = x["descr"][0]["value"]
- else:
- descr = None
-                        f[
-                            "text"
-                        ] = '<span {}>{} {}\
-                        ({}): {}</span>'.format(
-                            dataclass, source, name, score, descr
-                        )
- choices.append(f)
- for k in test_stanbol_list.keys():
- if test_stanbol_list[k]:
- test_stanbol = True
-
- cust_auto_more = False
- if q:
- cust_auto = CustomEntityAutocompletes(
- ac_type, q, page_size=page_size, offset=offset
- )
- if cust_auto.results is not None:
- cust_auto_more = cust_auto.more
- if len(cust_auto.results) > 0:
- choices.extend(cust_auto.results)
- if not test_db and not test_stanbol and not cust_auto_more:
- more = False
-
- return http.HttpResponse(
- json.dumps({"results": choices + [], "pagination": {"more": more}}),
- content_type="application/json",
- )
-
-
# TODO RDF: Check if this should be removed or adapted
class GenericNetworkEntitiesAutocomplete(autocomplete.Select2ListView):
def get(self, request, *args, **kwargs):
diff --git a/apis_core/apis_entities/urls.py b/apis_core/apis_entities/urls.py
index 205b7762f..b8f6608df 100644
--- a/apis_core/apis_entities/urls.py
+++ b/apis_core/apis_entities/urls.py
@@ -4,7 +4,6 @@
from django.shortcuts import get_list_or_404
from .autocomplete3 import (
- GenericEntitiesAutocomplete,
GenericNetworkEntitiesAutocomplete,
)
@@ -82,33 +81,11 @@ def to_url(self, value):
),
]
-autocomplete_patterns = [
-    path(
-        "<slug:entity>/<int:ent_merge_pk>/",
-        GenericEntitiesAutocomplete.as_view(),
-        name="generic_entities_autocomplete",
-    ),
-    path(
-        "<slug:entity>/<str:db_include>/",
-        GenericEntitiesAutocomplete.as_view(),
-        name="generic_entities_autocomplete",
-    ),
-    path(
-        "<slug:entity>/",
-        GenericEntitiesAutocomplete.as_view(),
-        name="generic_entities_autocomplete",
-    ),
-]
-
urlpatterns = [
path(
        "entity/<slug:entity>/",
include(entity_patterns),
),
- path(
- "autocomplete/",
- include(autocomplete_patterns),
- ),
path(
        "autocomplete-network/<slug:entity>/",
GenericNetworkEntitiesAutocomplete.as_view(),
diff --git a/apis_core/apis_relations/forms.py b/apis_core/apis_relations/forms.py
index d6f42b53c..fdebb23b0 100644
--- a/apis_core/apis_relations/forms.py
+++ b/apis_core/apis_relations/forms.py
@@ -16,7 +16,6 @@
from .tables import get_generic_triple_table
from apis_core.apis_entities.autocomplete3 import (
PropertyAutocomplete,
- GenericEntitiesAutocomplete,
)
@@ -58,10 +57,6 @@ def __init__(self, entity_type_self_str, entity_type_other_str):
attrs_target = copy.deepcopy(attrs)
attrs_target["data-tags"] = "1"
- # This assert only serves as a linking for us devs, to make explicit what internal object the class
- # Select2ListCreateChoiceField object afterwards uses.
- assert GenericEntitiesAutocomplete
-
ct = ContentType.objects.get(model=entity_type_other_str.lower())
url = reverse("apis:generic:autocomplete", args=[ct])