Merge branch 'feast-dev:master' into master
redhatHameed authored Jan 10, 2025
2 parents 7ea572d + a8aeb79 commit f21193c
Showing 6 changed files with 39 additions and 9 deletions.
2 changes: 1 addition & 1 deletion infra/charts/feast-feature-server/Chart.yaml
@@ -2,7 +2,7 @@ apiVersion: v2
name: feast-feature-server
description: Feast Feature Server in Go or Python
type: application
-version: 0.42.0
+version: 0.42.1
keywords:
- machine learning
- big data
3 changes: 2 additions & 1 deletion infra/charts/feast-feature-server/README.md
@@ -60,4 +60,5 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d
| service.port | int | `80` | |
| service.type | string | `"ClusterIP"` | |
| serviceAccount.name | string | `""` | |
-| tolerations | list | `[]` | |
+| tolerations | list | `[]` | |
+| route.enabled | bool | `false` | |
18 changes: 18 additions & 0 deletions infra/charts/feast-feature-server/templates/route.yaml
@@ -0,0 +1,18 @@
{{- if and (.Values.route.enabled) (eq .Values.feast_mode "ui") }}
---
kind: Route
apiVersion: route.openshift.io/v1
metadata:
  name: {{ include "feast-feature-server.fullname" . }}
  labels:
    {{- include "feast-feature-server.labels" . | nindent 4 }}
spec:
  to:
    kind: Service
    name: {{ include "feast-feature-server.fullname" . }}
  port:
    targetPort: http
  tls:
    termination: edge
    insecureEdgeTerminationPolicy: Redirect
{{- end}}
4 changes: 4 additions & 0 deletions infra/charts/feast-feature-server/values.yaml
@@ -74,3 +74,7 @@ livenessProbe:
readinessProbe:
  initialDelaySeconds: 20
  periodSeconds: 10

# to create OpenShift Route object for UI
route:
  enabled: false
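
For anyone reviewing the new template locally, a quick way to check the conditional is to render the chart with the flag flipped on. The snippet below is only a sketch, not part of the commit: it assumes a local checkout of the feast repo, an installed `helm` CLI, and that `feast_mode` can be set directly with `--set`, the same value the template's condition reads.

```python
# Sketch: render the chart with route.enabled=true and feast_mode=ui, then check
# that an OpenShift Route appears in the output. Chart path and --set values are
# assumptions based on the diff above, not part of the commit itself.
import subprocess

rendered = subprocess.run(
    [
        "helm", "template", "feast-feature-server",
        "infra/charts/feast-feature-server",  # chart path inside a feast checkout
        "--set", "route.enabled=true",
        "--set", "feast_mode=ui",
    ],
    capture_output=True,
    text=True,
    check=True,
).stdout

# The Route manifest is emitted only when both conditions in route.yaml hold.
print("kind: Route" in rendered)
```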
19 changes: 12 additions & 7 deletions sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py
@@ -108,12 +108,16 @@ class MilvusOnlineStore(OnlineStore):

    def _connect(self, config: RepoConfig) -> MilvusClient:
        if not self.client:
-            self.client = MilvusClient(
-                url=f"{config.online_store.host}:{config.online_store.port}",
-                token=f"{config.online_store.username}:{config.online_store.password}"
-                if config.online_store.username and config.online_store.password
-                else "",
-            )
+            if config.provider == "local":
+                print("Connecting to Milvus in local mode using ./milvus_demo.db")
+                self.client = MilvusClient("./milvus_demo.db")
+            else:
+                self.client = MilvusClient(
+                    url=f"{config.online_store.host}:{config.online_store.port}",
+                    token=f"{config.online_store.username}:{config.online_store.password}"
+                    if config.online_store.username and config.online_store.password
+                    else "",
+                )
        return self.client

    def _get_collection(self, config: RepoConfig, table: FeatureView) -> Dict[str, Any]:
@@ -247,7 +251,8 @@ def online_write_batch(
progress(1)

self.client.insert(
-collection_name=collection["collection_name"], data=entity_batch_to_insert
+collection_name=collection["collection_name"],
+data=entity_batch_to_insert,
)

def online_read(
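
The change above makes `_connect` pick a connection mode from the provider setting. As a rough illustration (outside Feast, with placeholder host and credentials), the two pymilvus modes it switches between look like this:

```python
# Sketch of the two pymilvus connection modes the new branch in _connect() chooses
# between; host, port, and credentials below are placeholders, not values from the commit.
from pymilvus import MilvusClient

# Local mode (Milvus Lite): data lives in a local file, as with the
# "./milvus_demo.db" path the commit uses when config.provider == "local".
local_client = MilvusClient("./milvus_demo.db")

# Server mode: connect to a running Milvus instance, optionally authenticating
# with a "user:password" token, mirroring the else branch above.
remote_client = MilvusClient(
    uri="http://localhost:19530",  # placeholder endpoint
    token="username:password",     # placeholder credentials; drop if auth is off
)

print(local_client.list_collections())
```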
2 changes: 2 additions & 0 deletions sdk/python/feast/type_map.py
@@ -815,6 +815,7 @@ def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType:
"float": ValueType.FLOAT,
"boolean": ValueType.BOOL,
"timestamp": ValueType.UNIX_TIMESTAMP,
"date": ValueType.UNIX_TIMESTAMP,
"array<byte>": ValueType.BYTES_LIST,
"array<string>": ValueType.STRING_LIST,
"array<int>": ValueType.INT32_LIST,
@@ -824,6 +825,7 @@ def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType:
"array<float>": ValueType.FLOAT_LIST,
"array<boolean>": ValueType.BOOL_LIST,
"array<timestamp>": ValueType.UNIX_TIMESTAMP_LIST,
"array<date>": ValueType.UNIX_TIMESTAMP_LIST,
}
if spark_type_as_str.startswith("decimal"):
spark_type_as_str = "decimal"
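
With the two new entries, Spark `date` and `array<date>` columns now resolve to Feast timestamp types. A small sanity check, assuming the import paths shown in the diff:

```python
# Sanity check for the new Spark "date" mappings; assumes feast is installed and the
# function lives at feast.type_map.spark_to_feast_value_type as the diff shows.
from feast.type_map import spark_to_feast_value_type
from feast.value_type import ValueType

assert spark_to_feast_value_type("date") == ValueType.UNIX_TIMESTAMP
assert spark_to_feast_value_type("array<date>") == ValueType.UNIX_TIMESTAMP_LIST
print("date mappings OK")
```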
