Skip to content

Commit

Permalink
fix missing None type check in json and support omitting None values i…
Browse files Browse the repository at this point in the history
…n row insert

Signed-off-by: lixinguo <xinguo.li@zilliz.com>
  • Loading branch information
lixinguo committed Sep 26, 2024
1 parent 6cc2e55 commit 247e736
Show file tree
Hide file tree
Showing 4 changed files with 60 additions and 16 deletions.
2 changes: 1 addition & 1 deletion pymilvus/client/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -511,7 +511,7 @@ def get_fields_by_range(
res = apply_valid_data(
scalars.json_data.data[start:end], field.valid_data, start, end
)
json_dict_list = list(map(ujson.loads, res))
json_dict_list = [ujson.loads(item) if item is not None else item for item in res]
field2data[name] = json_dict_list, field_meta
continue

Expand Down
55 changes: 42 additions & 13 deletions pymilvus/client/entity_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,15 +191,19 @@ def convert_to_json(obj: object):
return ujson.dumps(obj, ensure_ascii=False).encode(Config.EncodeProtocol)


def convert_to_json_arr(objs: List[object]):
def convert_to_json_arr(objs: List[object], field_info: Any):
    """Serialize every element of *objs* to JSON bytes.

    Raises ParamError if any element is None, since a JSON field entry
    must carry an actual value here.
    """
    encoded = []
    for item in objs:
        if item is None:
            raise ParamError(
                message=f"field ({field_info['name']}) expect not None input"
            )
        encoded.append(convert_to_json(item))
    return encoded


def entity_to_json_arr(entity: Dict):
return convert_to_json_arr(entity.get("values", []))
def entity_to_json_arr(entity: Dict, field_info: Any):
    """Pull the "values" list out of *entity* and JSON-encode each element."""
    values = entity.get("values", [])
    return convert_to_json_arr(values, field_info)


def convert_to_array_arr(objs: List[Any], field_info: Any):
Expand Down Expand Up @@ -243,39 +247,55 @@ def pack_field_value_to_field_data(
field_name = field_info["name"]
if field_type == DataType.BOOL:
try:
field_data.scalars.bool_data.data.append(field_value)
if field_value is None:
field_data.scalars.bool_data.data.extend([])
else:
field_data.scalars.bool_data.data.append(field_value)
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "bool", type(field_value))
) from e
elif field_type in (DataType.INT8, DataType.INT16, DataType.INT32):
try:
field_data.scalars.int_data.data.append(field_value)
# need to extend it, or cannot correctly identify field_data.scalars.int_data.data
if field_value is None:
field_data.scalars.int_data.data.extend([])
else:
field_data.scalars.int_data.data.append(field_value)
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "int", type(field_value))
) from e
elif field_type == DataType.INT64:
try:
field_data.scalars.long_data.data.append(field_value)
if field_value is None:
field_data.scalars.long_data.data.extend([])
else:
field_data.scalars.long_data.data.append(field_value)
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "int64", type(field_value))
) from e
elif field_type == DataType.FLOAT:
try:
field_data.scalars.float_data.data.append(field_value)
if field_value is None:
field_data.scalars.float_data.data.extend([])
else:
field_data.scalars.float_data.data.append(field_value)
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "float", type(field_value))
) from e
elif field_type == DataType.DOUBLE:
try:
field_data.scalars.double_data.data.append(field_value)
if field_value is None:
field_data.scalars.double_data.data.extend([])
else:
field_data.scalars.double_data.data.append(field_value)
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
Expand Down Expand Up @@ -369,7 +389,10 @@ def pack_field_value_to_field_data(
) from e
elif field_type == DataType.VARCHAR:
try:
field_data.scalars.string_data.data.append(
if field_value is None:
field_data.scalars.string_data.data.extend([])
else:
field_data.scalars.string_data.data.append(
convert_to_str_array(field_value, field_info, CHECK_STR_ARRAY)
)
except (TypeError, ValueError) as e:
Expand All @@ -379,15 +402,21 @@ def pack_field_value_to_field_data(
) from e
elif field_type == DataType.JSON:
try:
field_data.scalars.json_data.data.append(convert_to_json(field_value))
if field_value is None:
field_data.scalars.json_data.data.extend([])
else:
field_data.scalars.json_data.data.append(convert_to_json(field_value))
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "json", type(field_value))
) from e
elif field_type == DataType.ARRAY:
try:
field_data.scalars.array_data.data.append(convert_to_array(field_value, field_info))
if field_value is None:
field_data.scalars.array_data.data.extend([])
else:
field_data.scalars.array_data.data.append(convert_to_array(field_value, field_info))
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
Expand Down Expand Up @@ -507,11 +536,11 @@ def entity_to_field_data(entity: Any, field_info: Any, num_rows: int):
) from e
elif entity_type == DataType.JSON:
try:
field_data.scalars.json_data.data.extend(entity_to_json_arr(entity))
field_data.scalars.json_data.data.extend(entity_to_json_arr(entity, field_info))
except (TypeError, ValueError) as e:
raise DataNotMatchException(
message=ExceptionsMessage.FieldDataInconsistent
% (field_name, "json", type(entity.get("values")[0]))
% (field_name, "json", type(entity.get("values")))
) from e
elif entity_type == DataType.ARRAY:
try:
Expand Down
16 changes: 14 additions & 2 deletions pymilvus/client/prepare.py
Original file line number Diff line number Diff line change
Expand Up @@ -405,8 +405,20 @@ def _parse_row_request(
"default_value", None
):
field_data.valid_data.append(v is not None)
if v is not None:
entity_helper.pack_field_value_to_field_data(v, field_data, field_info)
entity_helper.pack_field_value_to_field_data(v, field_data, field_info)
for k, v in fields_data.items():
if k in entity:
continue
field_info, field_data = field_info_map[k], fields_data[k]
if field_info.get("nullable", False) or field_info.get(
"default_value", None
):
field_data.valid_data.append(False)
entity_helper.pack_field_value_to_field_data(None, field_data, field_info)
else:
raise DataNotMatchException(
message=ExceptionsMessage.InsertMissedField % k
)
json_dict = {
k: v for k, v in entity.items() if k not in fields_data and enable_dynamic
}
Expand Down
3 changes: 3 additions & 0 deletions pymilvus/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,9 @@ class ExceptionsMessage:
InsertUnexpectedField = (
"Attempt to insert an unexpected field `%s` to collection without enabling dynamic field"
)
# Raised during row insert when a required field is absent from the row and the
# schema neither marks it nullable nor supplies a default_value for it.
InsertMissedField = (
    "Insert missed a field `%s` for a collection field without nullable==true or a default_value"
)
UpsertAutoIDTrue = "Upsert don't support autoid == true"
AmbiguousDeleteFilterParam = (
"Ambiguous filter parameter, only one deletion condition can be specified."
Expand Down

0 comments on commit 247e736

Please sign in to comment.