
Commit e065603
Dependencies: downgrade elasticsearch-7x branch to Elasticsearch 7.17.8 (#465)
alexklibisz authored Jan 17, 2023
1 parent d591cf9 commit e065603
Showing 26 changed files with 438 additions and 405 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/ci.yml
@@ -4,9 +4,11 @@ on:
   pull_request:
     branches:
       - main
+      - elasticsearch-7x
   push:
     branches:
       - main
+      - elasticsearch-7x
 
 concurrency:
   group: ${{ github.ref }}
11 changes: 6 additions & 5 deletions build.sbt
@@ -1,13 +1,13 @@
 import ElasticsearchPluginPlugin.autoImport._
 
-Global / scalaVersion := "2.13.9"
+Global / scalaVersion := "2.13.5"
 
 lazy val CirceVersion = "0.14.1"
 lazy val CirceGenericExtrasVersion = "0.14.1"
-lazy val ElasticsearchVersion = "8.6.0"
-lazy val Elastic4sVersion = "8.5.2"
+lazy val ElasticsearchVersion = "7.17.8"
+lazy val Elastic4sVersion = "7.17.4"
 lazy val ElastiknnVersion = IO.read(file("version")).strip()
-lazy val LuceneVersion = "9.4.2"
+lazy val LuceneVersion = "8.11.1"
 
 lazy val ScalacOptions = List("-Xfatal-warnings", "-Ywarn-unused:imports")
 lazy val TestSettings = Seq(
@@ -64,7 +64,8 @@ lazy val `elastiknn-lucene` = project
     version := ElastiknnVersion,
     libraryDependencies ++= Seq(
       "org.apache.lucene" % "lucene-core" % LuceneVersion,
-      "org.apache.lucene" % "lucene-analysis-common" % LuceneVersion % Test
+      "org.apache.lucene" % "lucene-analyzers-common" % LuceneVersion % Test,
+      "org.apache.lucene" % "lucene-codecs" % LuceneVersion % Test
     ),
     scalacOptions ++= ScalacOptions,
     TestSettings
32 changes: 16 additions & 16 deletions client-python/elastiknn/client.py
@@ -24,7 +24,7 @@ def __init__(self, es: Elasticsearch = None):
             Defaults to a client pointing at http://localhost:9200.
         """
         if es is None:
-            self.es = Elasticsearch(["http://localhost:9200"], request_timeout=99)
+            self.es = Elasticsearch(["http://localhost:9200"], timeout=99)
         else:
             self.es = es
 
@@ -48,14 +48,16 @@ def put_mapping(self, index: str, vec_field: str, mapping: Mapping.Base, stored_id_field
         Dict
             Json response as a dict. Successful request returns `{"acknowledged": true}`.
         """
-        properties = {
-            vec_field: mapping.to_dict(),
-            stored_id_field: {
-                "type": "keyword",
-                "store": True
+        body = {
+            "properties": {
+                vec_field: mapping.to_dict(),
+                stored_id_field: {
+                    "type": "keyword",
+                    "store": True
+                }
             }
         }
-        return self.es.indices.put_mapping(properties=properties, index=index)
+        return self.es.indices.put_mapping(body, index=index)
 
     def index(self, index: str, vec_field: str, vecs: Iterable[Vec.Base], stored_id_field: str, ids: Iterable[str], refresh: bool = False) -> Tuple[int, List[Dict]]:
         """Index (i.e. store) the given vectors at the given index and field with the optional ids.
@@ -115,16 +117,14 @@ def nearest_neighbors(self, index: str, query: NearestNeighborsQuery.Base, stored_id_field
         Dict
             Standard Elasticsearch search response parsed as a dict.
         """
-        query = {
-            "elastiknn_nearest_neighbors": query.to_dict()
+        body = {
+            "query": {
+                "elastiknn_nearest_neighbors": query.to_dict()
+            }
         }
         if fetch_source:
-            return self.es.search(index=index, query=query, size=k)
+            return self.es.search(index=index, body=body, size=k)
         else:
-            return self.es.search(index=index,
-                                  query=query,
-                                  size=k,
-                                  _source=fetch_source,
-                                  docvalue_fields=[stored_id_field],
+            return self.es.search(index=index, body=body, size=k, _source=fetch_source, docvalue_fields=stored_id_field,
                                   stored_fields="_none_",
-                                  filter_path=[f'hits.hits.fields.{stored_id_field}', 'hits.hits._score'])
+                                  filter_path=[f'hits.hits.fields.{stored_id_field}', 'hits.hits._score'])
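Context for the client.py changes above: the 8.x elasticsearch-py client takes request parameters as keyword arguments (properties=, query=, request_timeout=), while the 7.x client expects a single body dict plus the older timeout option. Below is a minimal, hypothetical sketch of the 7.x-style calls this file switches to; the index name, field names, and the exact elastiknn mapping/query parameters are illustrative and not taken from this commit.

from elasticsearch import Elasticsearch

# 7.x client: timeout is a plain keyword argument (renamed to request_timeout in 8.x).
es = Elasticsearch(["http://localhost:9200"], timeout=99)

# put_mapping: wrap the field mappings in a top-level "properties" key and pass the dict as the body.
mapping_body = {
    "properties": {
        "my_vec": {"type": "elastiknn_dense_float_vector", "elastiknn": {"dims": 3}},  # illustrative elastiknn mapping
        "my_id": {"type": "keyword", "store": True},
    }
}
es.indices.put_mapping(mapping_body, index="my-index")

# search: nest the query clause under a top-level "query" key in the body
# instead of passing query= as a keyword argument.
search_body = {
    "query": {
        "elastiknn_nearest_neighbors": {  # illustrative parameters; see the elastiknn docs for the full DSL
            "field": "my_vec",
            "vec": {"values": [0.1, 0.2, 0.3]},
            "model": "exact",
            "similarity": "l2",
        }
    }
}
es.search(index="my-index", body=search_body, size=10)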
6 changes: 3 additions & 3 deletions client-python/elastiknn/models.py
@@ -43,9 +43,9 @@ def fit(self, X: Union[np.ndarray, csr_matrix, List[Vec.SparseBool], List[Vec.De
             self._index = f"{ELASTIKNN_NAME}-{int(time())}"
             self._logger.warning(f"index was not given, using {self._index} instead")
 
-        self._eknn.es.indices.delete(index=self._index, ignore_unavailable=True)
-        # body = dict(settings=dict(number_of_shards=shards, elastiknn=True, number_of_replicas=0))
-        self._eknn.es.indices.create(index=self._index, settings=dict(number_of_shards=shards, elastiknn=True, number_of_replicas=0))
+        self._eknn.es.indices.delete(self._index, ignore=[400, 404])
+        body = dict(settings=dict(number_of_shards=shards, elastiknn=True, number_of_replicas=0))
+        self._eknn.es.indices.create(self._index, body=json.dumps(body))
         self._eknn.put_mapping(self._index, self._vec_field, mapping, self._stored_id_field)
 
         self._logger.info(f"indexing {len(X)} vectors into index {self._index}")
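For the models.py change above, a small sketch of the 7.x-style index lifecycle calls, assuming a local cluster with the plugin installed; the index name and shard/replica counts are illustrative and not taken from this commit.

import json
from elasticsearch import Elasticsearch

es = Elasticsearch(["http://localhost:9200"], timeout=99)
index = "elastiknn-demo"  # hypothetical index name

# 7.x client: ignore=[400, 404] tells the transport not to raise on HTTP 400/404
# responses (e.g., the index does not exist), replacing 8.x's ignore_unavailable=True.
es.indices.delete(index, ignore=[400, 404])

# 7.x client: index settings are passed via body (a dict or JSON string), mirroring the fit() call above.
settings_body = dict(settings=dict(number_of_shards=1, elastiknn=True, number_of_replicas=0))
es.indices.create(index, body=json.dumps(settings_body))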
2 changes: 1 addition & 1 deletion client-python/requirements.txt
@@ -1,4 +1,4 @@
-elasticsearch==8.6.0
+elasticsearch==7.17.8
 dataclasses-json==0.3.7
 tqdm==4.61.1
 scipy==1.7.0
2 changes: 1 addition & 1 deletion client-python/tests/test_client.py
@@ -12,7 +12,7 @@ def test_exact_jaccard(self):
         id_field = "id"
         mapping = Mapping.SparseBool(dims=dim)
 
-        eknn.es.indices.delete(index=index, ignore_unavailable=True)
+        eknn.es.indices.delete(index=index, ignore=[400, 404])
         eknn.es.indices.refresh()
         eknn.es.indices.create(index=index)
         eknn.es.indices.refresh()
2 changes: 1 addition & 1 deletion docker/Dockerfile
@@ -1,3 +1,3 @@
-FROM docker.elastic.co/elasticsearch/elasticsearch:8.6.0-amd64
+FROM docker.elastic.co/elasticsearch/elasticsearch:7.17.8-amd64
 COPY elastiknn-plugin/target/elastiknn*.zip .
 RUN elasticsearch-plugin install -b file:$(ls elastiknn*zip | sort | tail -n1)