diff --git a/tests/python/conftest.py b/tests/python/conftest.py
index 12c2a4dd98d83575f794a06afed239759bee3a4b..75328740dcf4b272e716bbdf3c0635067d9f0902 100644
--- a/tests/python/conftest.py
+++ b/tests/python/conftest.py
@@ -171,6 +171,7 @@ def collection(request, connect):
     try:
         default_fields = gen_default_fields()
         connect.create_collection(collection_name, default_fields)
+        connect.load_collection(collection_name)
     except Exception as e:
         pytest.exit(str(e))
     def teardown():
@@ -189,6 +190,7 @@ def id_collection(request, connect):
     try:
         fields = gen_default_fields(auto_id=False)
         connect.create_collection(collection_name, fields)
+        connect.load_collection(collection_name)
     except Exception as e:
         pytest.exit(str(e))
     def teardown():
@@ -206,6 +208,7 @@ def binary_collection(request, connect):
     try:
         fields = gen_binary_default_fields()
         connect.create_collection(collection_name, fields)
+        connect.load_collection(collection_name)
     except Exception as e:
         pytest.exit(str(e))
     def teardown():
@@ -225,6 +228,7 @@ def binary_id_collection(request, connect):
     try:
         fields = gen_binary_default_fields(auto_id=False)
         connect.create_collection(collection_name, fields)
+        connect.load_collection(collection_name)
     except Exception as e:
         pytest.exit(str(e))
     def teardown():
diff --git a/tests/python/test_bulk_insert.py b/tests/python/test_bulk_insert.py
deleted file mode 100644
index 8b43626ea04e4cc5296e557fe247acba52d97f4e..0000000000000000000000000000000000000000
--- a/tests/python/test_bulk_insert.py
+++ /dev/null
@@ -1,1148 +0,0 @@
-import pytest
-from .utils import *
-from .constants import *
-
-ADD_TIMEOUT = 600
-uid = "test_insert"
-field_name = default_float_vec_field_name
-binary_field_name = default_binary_vec_field_name
-default_single_query = {
-    "bool": {
-        "must": [
-            {"vector": {field_name: {"topk": 10, "query": gen_vectors(1, default_dim), "metric_type": "L2",
-                                     "params": {"nprobe": 10}}}}
-        ]
-    }
-}
-
-
-class TestInsertBase:
-    """
-    ******************************************************************
-      The following cases are used to test `insert` function
-    ******************************************************************
-    """
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_simple_index()
-    )
-    def get_simple_index(self, request, connect):
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("CPU not support index_type: ivf_sq8h")
-        return request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_single_filter_fields()
-    )
-    def get_filter_field(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_single_vector_fields()
-    )
-    def get_vector_field(self, request):
-        yield request.param
-
-    def test_add_vector_with_empty_vector(self, connect, collection):
-        '''
-        target: test add vectors with empty vectors list
-        method: set empty vectors list as add method params
-        expected: raises a Exception
-        '''
-        vector = []
-        with pytest.raises(Exception) as e:
-            status, ids = connect.bulk_insert(collection, vector)
-
-    def test_add_vector_with_None(self, connect, collection):
-        '''
-        target: test add vectors with None
-        method: set None as add method params
-        expected: raises a Exception
-        '''
-        vector = None
-        with pytest.raises(Exception) as e:
-            status, ids = connect.bulk_insert(collection, vector)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_collection_not_existed(self, connect):
-        '''
-        target: test insert, with collection not existed
-        method: insert entity into a random named collection
-        expected: error raised 
-        '''
-        collection_name = gen_unique_str(uid)
-        with pytest.raises(Exception) as e:
-            connect.bulk_insert(collection_name, default_entities)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_drop_collection(self, connect, collection):
-        '''
-        target: test delete collection after insert vector
-        method: insert vector and delete collection
-        expected: no error raised
-        '''
-        ids = connect.bulk_insert(collection, default_entity)
-        assert len(ids) == 1
-        connect.drop_collection(collection)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_sleep_drop_collection(self, connect, collection):
-        '''
-        target: test delete collection after insert vector for a while
-        method: insert vector, sleep, and delete collection
-        expected: no error raised 
-        '''
-        ids = connect.bulk_insert(collection, default_entity)
-        assert len(ids) == 1
-        connect.flush([collection])
-        connect.drop_collection(collection)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_create_index(self, connect, collection, get_simple_index):
-        '''
-        target: test build index insert after vector
-        method: insert vector and build index
-        expected: no error raised
-        '''
-        ids = connect.bulk_insert(collection, default_entities)
-        assert len(ids) == default_nb
-        connect.flush([collection])
-        connect.create_index(collection, field_name, get_simple_index)
-        info = connect.get_index_info(collection, field_name)
-        assert info == get_simple_index
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_create_index_new(self, connect, collection, get_simple_index):
-        '''
-        target: test build index insert after vector
-        method: insert vector and build index
-        expected: no error raised
-        '''
-        ids = connect.bulk_insert(collection, default_entities_new)
-        assert len(ids) == default_nb
-        connect.flush([collection])
-        connect.create_index(collection, field_name, get_simple_index)
-        info = connect.get_index_info(collection, field_name)
-        assert info == get_simple_index
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_after_create_index(self, connect, collection, get_simple_index):
-        '''
-        target: test build index insert after vector
-        method: insert vector and build index
-        expected: no error raised
-        '''
-        connect.create_index(collection, field_name, get_simple_index)
-        ids = connect.bulk_insert(collection, default_entities)
-        assert len(ids) == default_nb
-        info = connect.get_index_info(collection, field_name)
-        assert info == get_simple_index
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_search(self, connect, collection):
-        '''
-        target: test search vector after insert vector after a while
-        method: insert vector, sleep, and search collection
-        expected: no error raised 
-        '''
-        ids = connect.bulk_insert(collection, default_entities)
-        connect.flush([collection])
-        res = connect.search(collection, default_single_query)
-        logging.getLogger().debug(res)
-        assert res
-    
-    @pytest.mark.skip("segment row count")
-    def test_insert_segment_row_count(self, connect, collection):
-        nb = default_segment_row_limit + 1
-        res_ids = connect.bulk_insert(collection, gen_entities(nb))
-        connect.flush([collection])
-        assert len(res_ids) == nb
-        stats = connect.get_collection_stats(collection)
-        assert len(stats['partitions'][0]['segments']) == 2
-        for segment in stats['partitions'][0]['segments']:
-            assert segment['row_count'] in [default_segment_row_limit, 1]
-
-    @pytest.fixture(
-        scope="function",
-        params=[
-            1,
-            2000
-        ],
-    )
-    def insert_count(self, request):
-        yield request.param
-
-    @pytest.mark.skip(" todo support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids(self, connect, id_collection, insert_count):
-        '''
-        target: test insert vectors in collection, use customize ids
-        method: create collection and insert vectors in it, check the ids returned and the collection length after vectors inserted
-        expected: the length of ids and the collection row count
-        '''
-        nb = insert_count
-        ids = [i for i in range(nb)]
-        res_ids = connect.bulk_insert(id_collection, gen_entities(nb), ids)
-        connect.flush([id_collection])
-        assert len(res_ids) == nb
-        assert res_ids == ids
-        res_count = connect.count_entities(id_collection)
-        assert res_count == nb
-
-    @pytest.mark.skip(" todo support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_the_same_ids(self, connect, id_collection, insert_count):
-        '''
-        target: test insert vectors in collection, use customize the same ids
-        method: create collection and insert vectors in it, check the ids returned and the collection length after vectors inserted
-        expected: the length of ids and the collection row count
-        '''
-        nb = insert_count
-        ids = [1 for i in range(nb)]
-        res_ids = connect.bulk_insert(id_collection, gen_entities(nb), ids)
-        connect.flush([id_collection])
-        assert len(res_ids) == nb
-        assert res_ids == ids
-        res_count = connect.count_entities(id_collection)
-        assert res_count == nb
-
-    @pytest.mark.skip(" todo support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
-        '''
-        target: test create normal collection with different fields, insert entities into id with ids
-        method: create collection with diff fields: metric/field_type/..., insert, and count
-        expected: row count correct
-        '''
-        nb = 5
-        filter_field = get_filter_field
-        vector_field = get_vector_field
-        collection_name = gen_unique_str("test_collection")
-        fields = {
-            "fields": [filter_field, vector_field],
-            "segment_row_limit": default_segment_row_limit,
-            "auto_id": True
-        }
-        connect.create_collection(collection_name, fields)
-        ids = [i for i in range(nb)]
-        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
-        res_ids = connect.bulk_insert(collection_name, entities, ids)
-        assert res_ids == ids
-        connect.flush([collection_name])
-        res_count = connect.count_entities(collection_name)
-        assert res_count == nb
-
-    # TODO: assert exception && enable
-    @pytest.mark.level(2)
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_twice_ids_no_ids(self, connect, id_collection):
-        '''
-        target: check the result of insert, with params ids and no ids
-        method: test insert vectors twice, use customize ids first, and then use no ids
-        expected:  error raised
-        '''
-        ids = [i for i in range(default_nb)]
-        res_ids = connect.bulk_insert(id_collection, default_entities, ids)
-        with pytest.raises(Exception) as e:
-            res_ids_new = connect.bulk_insert(id_collection, default_entities)
-
-    # TODO: assert exception && enable
-    @pytest.mark.level(2)
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_twice_not_ids_ids(self, connect, id_collection):
-        '''
-        target: check the result of insert, with params ids and no ids
-        method: test insert vectors twice, use not ids first, and then use customize ids
-        expected:  error raised
-        '''
-        with pytest.raises(Exception) as e:
-            res_ids = connect.bulk_insert(id_collection, default_entities)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids_length_not_match_batch(self, connect, id_collection):
-        '''
-        target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
-        method: create collection and insert vectors in it
-        expected: raise an exception
-        '''
-        ids = [i for i in range(1, default_nb)]
-        logging.getLogger().info(len(ids))
-        with pytest.raises(Exception) as e:
-            res_ids = connect.bulk_insert(id_collection, default_entities, ids)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids_length_not_match_single(self, connect, collection):
-        '''
-        target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
-        method: create collection and insert vectors in it
-        expected: raise an exception
-        '''
-        ids = [i for i in range(1, default_nb)]
-        logging.getLogger().info(len(ids))
-        with pytest.raises(Exception) as e:
-            res_ids = connect.bulk_insert(collection, default_entity, ids)
-
-    @pytest.mark.skip(" todo support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
-        '''
-        target: test create normal collection with different fields, insert entities into id without ids
-        method: create collection with diff fields: metric/field_type/..., insert, and count
-        expected: row count correct
-        '''
-        nb = 5
-        filter_field = get_filter_field
-        vector_field = get_vector_field
-        collection_name = gen_unique_str("test_collection")
-        fields = {
-            "fields": [filter_field, vector_field],
-            "segment_row_limit": default_segment_row_limit
-        }
-        connect.create_collection(collection_name, fields)
-        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
-        res_ids = connect.bulk_insert(collection_name, entities)
-        connect.flush([collection_name])
-        res_count = connect.count_entities(collection_name)
-        assert res_count == nb
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_tag(self, connect, collection):
-        '''
-        target: test insert entities in collection created before
-        method: create collection and insert entities in it, with the partition_tag param
-        expected: the collection row count equals to nq
-        '''
-        connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
-        assert len(ids) == default_nb
-        assert connect.has_partition(collection, default_tag)
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_tag_with_ids(self, connect, id_collection):
-        '''
-        target: test insert entities in collection created before, insert with ids
-        method: create collection and insert entities in it, with the partition_tag param
-        expected: the collection row count equals to nq
-        '''
-        connect.create_partition(id_collection, default_tag)
-        ids = [i for i in range(default_nb)]
-        res_ids = connect.bulk_insert(id_collection, default_entities, ids, partition_tag=default_tag)
-        assert res_ids == ids
-
-
-    @pytest.mark.skip(" todo support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_default_tag(self, connect, collection):
-        '''
-        target: test insert entities into default partition
-        method: create partition and insert info collection without tag params
-        expected: the collection row count equals to nb
-        '''
-        connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities)
-        connect.flush([collection])
-        assert len(ids) == default_nb
-        res_count = connect.count_entities(collection)
-        assert res_count == default_nb
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_tag_not_existed(self, connect, collection):
-        '''
-        target: test insert entities in collection created before
-        method: create collection and insert entities in it, with the not existed partition_tag param
-        expected: error raised
-        '''
-        tag = gen_unique_str()
-        with pytest.raises(Exception) as e:
-            ids = connect.bulk_insert(collection, default_entities, partition_tag=tag)
-
-    @pytest.mark.skip(" not support count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_tag_existed(self, connect, collection):
-        '''
-        target: test insert entities in collection created before
-        method: create collection and insert entities in it repeatly, with the partition_tag param
-        expected: the collection row count equals to nq
-        '''
-        connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
-        connect.flush([collection])
-        res_count = connect.count_entities(collection)
-        assert res_count == 2 * default_nb
-
-    @pytest.mark.level(2)
-    def test_insert_without_connect(self, dis_connect, collection):
-        '''
-        target: test insert entities without connection
-        method: create collection and insert entities in it, check if inserted successfully
-        expected: raise exception
-        '''
-        with pytest.raises(Exception) as e:
-            ids = dis_connect.bulk_insert(collection, default_entities)
-
-    def test_insert_collection_not_existed(self, connect):
-        '''
-        target: test insert entities in collection, which not existed before
-        method: insert entities collection not existed, check the status
-        expected: error raised
-        '''
-        with pytest.raises(Exception) as e:
-            ids = connect.bulk_insert(gen_unique_str("not_exist_collection"), default_entities)
-
-    @pytest.mark.skip("to do add dim check ")
-    def test_insert_dim_not_matched(self, connect, collection):
-        '''
-        target: test insert entities, the vector dimension is not equal to the collection dimension
-        method: the entities dimension is half of the collection dimension, check the status
-        expected: error raised
-        '''
-        vectors = gen_vectors(default_nb, int(default_dim) // 2)
-        insert_entities = copy.deepcopy(default_entities)
-        insert_entities[-1]["values"] = vectors
-        with pytest.raises(Exception) as e:
-            ids = connect.bulk_insert(collection, insert_entities)
-
-
-    def test_insert_with_field_name_not_match(self, connect, collection):
-        '''
-        target: test insert entities, with the entity field name updated
-        method: update entity field name
-        expected: error raised
-        '''
-        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", "int64new")
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.skip(" todo support  type check")
-    def test_insert_with_field_type_not_match(self, connect, collection):
-        '''
-        target: test insert entities, with the entity field type updated
-        method: update entity field type
-        expected: error raised
-        '''
-        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.FLOAT)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.skip("to do add field_type check ")
-    @pytest.mark.level(2)
-    def test_insert_with_field_type_not_match_B(self, connect, collection):
-        '''
-        target: test insert entities, with the entity field type updated
-        method: update entity field type
-        expected: error raised
-        '''
-        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.DOUBLE)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.level(2)
-    def test_insert_with_field_value_not_match(self, connect, collection):
-        '''
-        target: test insert entities, with the entity field value updated
-        method: update entity field value
-        expected: error raised
-        '''
-        tmp_entity = update_field_value(copy.deepcopy(default_entity), DataType.FLOAT, 's')
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_field_more(self, connect, collection):
-        '''
-        target: test insert entities, with more fields than collection schema
-        method: add entity field
-        expected: error raised
-        '''
-        tmp_entity = add_field(copy.deepcopy(default_entity))
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_field_vector_more(self, connect, collection):
-        '''
-        target: test insert entities, with more fields than collection schema
-        method: add entity vector field
-        expected: error raised
-        '''
-        tmp_entity = add_vector_field(default_nb, default_dim)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_field_less(self, connect, collection):
-        '''
-        target: test insert entities, with less fields than collection schema
-        method: remove entity field
-        expected: error raised
-        '''
-        tmp_entity = remove_field(copy.deepcopy(default_entity))
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_field_vector_less(self, connect, collection):
-        '''
-        target: test insert entities, with less fields than collection schema
-        method: remove entity vector field
-        expected: error raised
-        '''
-        tmp_entity = remove_vector_field(copy.deepcopy(default_entity))
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_no_field_vector_value(self, connect, collection):
-        '''
-        target: test insert entities, with no vector field value
-        method: remove entity vector field
-        expected: error raised
-        '''
-        tmp_entity = copy.deepcopy(default_entity)
-        del tmp_entity[-1]["values"]
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_no_field_vector_type(self, connect, collection):
-        '''
-        target: test insert entities, with no vector field type
-        method: remove entity vector field
-        expected: error raised
-        '''
-        tmp_entity = copy.deepcopy(default_entity)
-        del tmp_entity[-1]["type"]
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_no_field_vector_name(self, connect, collection):
-        '''
-        target: test insert entities, with no vector field name
-        method: remove entity vector field
-        expected: error raised
-        '''
-        tmp_entity = copy.deepcopy(default_entity)
-        del tmp_entity[-1]["name"]
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.skip("support count entities")
-    @pytest.mark.level(2)
-    @pytest.mark.timeout(30)
-    def test_collection_insert_rows_count_multi_threading(self, args, collection):
-        '''
-        target: test collection rows_count is correct or not with multi threading
-        method: create collection and insert entities in it(idmap),
-            assert the value returned by count_entities method is equal to length of entities
-        expected: the count is equal to the length of entities
-        '''
-        if args["handler"] == "HTTP":
-            pytest.skip("Skip test in http mode")
-        thread_num = 8
-        threads = []
-        milvus = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"], try_connect=False)
-
-        def insert(thread_i):
-            logging.getLogger().info("In thread-%d" % thread_i)
-            milvus.bulk_insert(collection, default_entities)
-            milvus.flush([collection])
-
-        for i in range(thread_num):
-            t = MilvusTestThread(target=insert, args=(i,))
-            threads.append(t)
-            t.start()
-        for t in threads:
-            t.join()
-        res_count = milvus.count_entities(collection)
-        assert res_count == thread_num * default_nb
-
-    # TODO: unable to set config
-    @pytest.mark.skip("get entity by id")
-    @pytest.mark.level(2)
-    def _test_insert_disable_auto_flush(self, connect, collection):
-        '''
-        target: test insert entities, with disable autoflush
-        method: disable autoflush and insert, get entity
-        expected: the count is equal to 0
-        '''
-        delete_nums = 500
-        disable_flush(connect)
-        ids = connect.bulk_insert(collection, default_entities)
-        res = connect.get_entity_by_id(collection, ids[:delete_nums])
-        assert len(res) == delete_nums
-        assert res[0] is None
-
-
-
-class TestInsertBinary:
-    @pytest.fixture(
-        scope="function",
-        params=gen_binary_index()
-    )
-    def get_binary_index(self, request):
-        request.param["metric_type"] = "JACCARD"
-        return request.param
-
-    @pytest.mark.skip("count entities")
-    def test_insert_binary_entities(self, connect, binary_collection):
-        '''
-        target: test insert entities in binary collection
-        method: create collection and insert binary entities in it
-        expected: the collection row count equals to nb
-        '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
-        assert len(ids) == default_nb
-        connect.flush()
-        assert connect.count_entities(binary_collection) == default_nb
-
-    @pytest.mark.skip("count entities")
-    def test_insert_binary_entities_new(self, connect, binary_collection):
-        '''
-        target: test insert entities in binary collection
-        method: create collection and insert binary entities in it
-        expected: the collection row count equals to nb
-        '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities_new)
-        assert len(ids) == default_nb
-        connect.flush()
-        assert connect.count_entities(binary_collection) == default_nb
-
-    # @pytest.mark.skip
-    def test_insert_binary_tag(self, connect, binary_collection):
-        '''
-        target: test insert entities and create partition tag
-        method: create collection and insert binary entities in it, with the partition_tag param
-        expected: the collection row count equals to nb
-        '''
-        connect.create_partition(binary_collection, default_tag)
-        ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
-        assert len(ids) == default_nb
-        assert connect.has_partition(binary_collection, default_tag)
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.level(2)
-    def test_insert_binary_multi_times(self, connect, binary_collection):
-        '''
-        target: test insert entities multi times and final flush
-        method: create collection and insert binary entity multi and final flush
-        expected: the collection row count equals to nb
-        '''
-        for i in range(default_nb):
-            ids = connect.bulk_insert(binary_collection, default_binary_entity)
-            assert len(ids) == 1
-        connect.flush([binary_collection])
-        assert connect.count_entities(binary_collection) == default_nb
-
-    def test_insert_binary_after_create_index(self, connect, binary_collection, get_binary_index):
-        '''
-        target: test insert binary entities after build index
-        method: build index and insert entities
-        expected: no error raised
-        '''
-        connect.create_index(binary_collection, binary_field_name, get_binary_index)
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
-        assert len(ids) == default_nb
-        connect.flush([binary_collection])
-        info = connect.get_index_info(binary_collection, binary_field_name)
-        assert info == get_binary_index
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_binary_create_index(self, connect, binary_collection, get_binary_index):
-        '''
-        target: test build index insert after vector
-        method: insert vector and build index
-        expected: no error raised
-        '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
-        assert len(ids) == default_nb
-        connect.flush([binary_collection])
-        connect.create_index(binary_collection, binary_field_name, get_binary_index)
-        info = connect.get_index_info(binary_collection, binary_field_name)
-        assert info == get_binary_index
-
-    @pytest.mark.skip("binary search")
-    def test_insert_binary_search(self, connect, binary_collection):
-        '''
-        target: test search vector after insert vector after a while
-        method: insert vector, sleep, and search collection
-        expected: no error raised
-        '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
-        connect.flush([binary_collection])
-        query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, 1, metric_type="JACCARD")
-        res = connect.search(binary_collection, query)
-        logging.getLogger().debug(res)
-        assert res
-
-
-class TestInsertAsync:
-    @pytest.fixture(scope="function", autouse=True)
-    def skip_http_check(self, args):
-        if args["handler"] == "HTTP":
-            pytest.skip("skip in http mode")
-
-    @pytest.fixture(
-        scope="function",
-        params=[
-            1,
-            1000
-        ],
-    )
-    def insert_count(self, request):
-        yield request.param
-
-    def check_status(self, result):
-        logging.getLogger().info("In callback check status")
-        assert not result
-
-    def check_result(self, result):
-        logging.getLogger().info("In callback check status")
-        assert result
-
-    def test_insert_async(self, connect, collection, insert_count):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: length of ids is equal to the length of vectors
-        '''
-        nb = insert_count
-        future = connect.bulk_insert(collection, gen_entities(nb), _async=True)
-        ids = future.result()
-        connect.flush([collection])
-        assert len(ids) == nb
-
-    @pytest.mark.level(2)
-    def test_insert_async_false(self, connect, collection, insert_count):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: length of ids is equal to the length of vectors
-        '''
-        nb = insert_count
-        ids = connect.bulk_insert(collection, gen_entities(nb), _async=False)
-        # ids = future.result()
-        connect.flush([collection])
-        assert len(ids) == nb
-
-    def test_insert_async_callback(self, connect, collection, insert_count):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: length of ids is equal to the length of vectors
-        '''
-        nb = insert_count
-        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
-        future.done()
-        ids = future.result()
-        assert len(ids) == nb
-
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.level(2)
-    def test_insert_async_long(self, connect, collection):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: length of ids is equal to the length of vectors
-        '''
-        nb = 50000
-        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
-        result = future.result()
-        assert len(result) == nb
-        connect.flush([collection])
-        count = connect.count_entities(collection)
-        logging.getLogger().info(count)
-        assert count == nb
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.level(2)
-    def test_insert_async_callback_timeout(self, connect, collection):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: length of ids is equal to the length of vectors
-        '''
-        nb = 100000
-        future = connect.bulk_insert(collection, gen_entities(nb), _async=True, _callback=self.check_status, timeout=1)
-        with pytest.raises(Exception) as e:
-            result = future.result()
-        count = connect.count_entities(collection)
-        assert count == 0
-
-    def test_insert_async_invalid_params(self, connect):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: raise exception
-        '''
-        collection_new = gen_unique_str()
-        with pytest.raises(Exception) as e:
-            future = connect.bulk_insert(collection_new, default_entities, _async=True)
-            result = future.result()
-
-    def test_insert_async_invalid_params_raise_exception(self, connect, collection):
-        '''
-        target: test insert vectors with different length of vectors
-        method: set different vectors as insert method params
-        expected: raise exception
-        '''
-        entities = []
-        with pytest.raises(Exception) as e:
-            future = connect.bulk_insert(collection, entities, _async=True)
-            future.result()
-
-
-class TestInsertMultiCollections:
-    """
-    ******************************************************************
-      The following cases are used to test `insert` function
-    ******************************************************************
-    """
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_simple_index()
-    )
-    def get_simple_index(self, request, connect):
-        logging.getLogger().info(request.param)
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
-        return request.param
-
-    @pytest.mark.skip("count entities")
-    def test_insert_vector_multi_collections(self, connect):
-        '''
-        target: test insert entities
-        method: create 10 collections and insert entities into them in turn
-        expected: row count
-        '''
-        collection_num = 10
-        collection_list = []
-        for i in range(collection_num):
-            collection_name = gen_unique_str(uid)
-            collection_list.append(collection_name)
-            connect.create_collection(collection_name, default_fields)
-            ids = connect.bulk_insert(collection_name, default_entities)
-            connect.flush([collection_name])
-            assert len(ids) == default_nb
-            count = connect.count_entities(collection_name)
-            assert count == default_nb
-
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_drop_collection_insert_vector_another(self, connect, collection):
-        '''
-        target: test insert vector to collection_1 after collection_2 deleted
-        method: delete collection_2 and insert vector to collection_1
-        expected: row count equals the length of entities inserted
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        connect.drop_collection(collection)
-        ids = connect.bulk_insert(collection_name, default_entity)
-        connect.flush([collection_name])
-        assert len(ids) == 1
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_create_index_insert_vector_another(self, connect, collection, get_simple_index):
-        '''
-        target: test insert vector to collection_2 after build index for collection_1
-        method: build index and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        connect.create_index(collection, field_name, get_simple_index)
-        ids = connect.bulk_insert(collection, default_entity)
-        connect.drop_collection(collection_name)
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_vector_create_index_another(self, connect, collection, get_simple_index):
-        '''
-        target: test insert vector to collection_2 after build index for collection_1
-        method: build index and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        ids = connect.bulk_insert(collection, default_entity)
-        connect.create_index(collection, field_name, get_simple_index)
-        count = connect.count_entities(collection_name)
-        assert count == 0
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_vector_sleep_create_index_another(self, connect, collection, get_simple_index):
-        '''
-        target: test insert vector to collection_2 after build index for collection_1 for a while
-        method: build index and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        ids = connect.bulk_insert(collection, default_entity)
-        connect.flush([collection])
-        connect.create_index(collection, field_name, get_simple_index)
-        count = connect.count_entities(collection)
-        assert count == 1
-
-    @pytest.mark.skip("count entities")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_search_vector_insert_vector_another(self, connect, collection):
-        '''
-        target: test insert vector to collection_1 after search collection_2
-        method: search collection and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        res = connect.search(collection, default_single_query)
-        logging.getLogger().debug(res)
-        ids = connect.bulk_insert(collection_name, default_entity)
-        connect.flush()
-        count = connect.count_entities(collection_name)
-        assert count == 1
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_vector_search_vector_another(self, connect, collection):
-        '''
-        target: test insert vector to collection_1 after search collection_2
-        method: search collection and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        ids = connect.bulk_insert(collection, default_entity)
-        result = connect.search(collection_name, default_single_query)
-
-    @pytest.mark.skip("r0.3-test")
-    @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_vector_sleep_search_vector_another(self, connect, collection):
-        '''
-        target: test insert vector to collection_1 after search collection_2 a while
-        method: search collection , sleep, and insert vector
-        expected: status ok
-        '''
-        collection_name = gen_unique_str(uid)
-        connect.create_collection(collection_name, default_fields)
-        ids = connect.bulk_insert(collection, default_entity)
-        connect.flush([collection])
-        result = connect.search(collection_name, default_single_query)
-
-
-class TestInsertInvalid(object):
-    """
-    Test inserting vectors with invalid collection names
-    """
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_collection_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_tag_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_type(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_int_value(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_ints()
-    )
-    def get_entity_id(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_vectors()
-    )
-    def get_field_vectors_value(self, request):
-        yield request.param
-
-    def test_insert_ids_invalid(self, connect, id_collection, get_entity_id):
-        '''
-        target: test insert, with using customize ids, which are not int64
-        method: create collection and insert entities in it
-        expected: raise an exception
-        '''
-        entity_id = get_entity_id
-        ids = [entity_id for _ in range(default_nb)]
-        with pytest.raises(Exception):
-            connect.bulk_insert(id_collection, default_entities, ids)
-
-    def test_insert_with_invalid_collection_name(self, connect, get_collection_name):
-        collection_name = get_collection_name
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection_name, default_entity)
-
-    def test_insert_with_invalid_tag_name(self, connect, collection, get_tag_name):
-        tag_name = get_tag_name
-        connect.create_partition(collection, default_tag)
-        if tag_name is not None:
-            with pytest.raises(Exception):
-                connect.bulk_insert(collection, default_entity, partition_tag=tag_name)
-        else:
-            connect.bulk_insert(collection, default_entity, partition_tag=tag_name)
-
-    def test_insert_with_invalid_field_name(self, connect, collection, get_field_name):
-        field_name = get_field_name
-        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", get_field_name)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.skip("laster add check of field type")
-    def test_insert_with_invalid_field_type(self, connect, collection, get_field_type):
-        field_type = get_field_type
-        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'float', field_type)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    @pytest.mark.skip("laster add check of field value")
-    def test_insert_with_invalid_field_value(self, connect, collection, get_field_int_value):
-        field_value = get_field_int_value
-        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'int64', field_value)
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-    def test_insert_with_invalid_field_vector_value(self, connect, collection, get_field_vectors_value):
-        tmp_entity = copy.deepcopy(default_entity)
-        src_vector = tmp_entity[-1]["values"]
-        src_vector[0][1] = get_field_vectors_value
-        with pytest.raises(Exception):
-            connect.bulk_insert(collection, tmp_entity)
-
-
-class TestInsertInvalidBinary(object):
-    """
-    Test inserting vectors with invalid collection names
-    """
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_collection_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_tag_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_name(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_type(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_strs()
-    )
-    def get_field_int_value(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_ints()
-    )
-    def get_entity_id(self, request):
-        yield request.param
-
-    @pytest.fixture(
-        scope="function",
-        params=gen_invalid_vectors()
-    )
-    def get_field_vectors_value(self, request):
-        yield request.param
-
-    @pytest.mark.level(2)
-    def test_insert_with_invalid_field_name(self, connect, binary_collection, get_field_name):
-        tmp_entity = update_field_name(copy.deepcopy(default_binary_entity), "int64", get_field_name)
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_collection, tmp_entity)
-
-    @pytest.mark.skip("todo support row data check")
-    @pytest.mark.level(2)
-    def test_insert_with_invalid_field_value(self, connect, binary_collection, get_field_int_value):
-        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', get_field_int_value)
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_collection, tmp_entity)
-
-    @pytest.mark.skip("todo support row data check")
-    @pytest.mark.level(2)
-    def test_insert_with_invalid_field_vector_value(self, connect, binary_collection, get_field_vectors_value):
-        tmp_entity = copy.deepcopy(default_binary_entity)
-        src_vector = tmp_entity[-1]["values"]
-        src_vector[0][1] = get_field_vectors_value
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_collection, tmp_entity)
-
-    @pytest.mark.level(2)
-    def test_insert_ids_invalid(self, connect, binary_id_collection, get_entity_id):
-        '''
-        target: test insert, with using customize ids, which are not int64
-        method: create collection and insert entities in it
-        expected: raise an exception
-        '''
-        entity_id = get_entity_id
-        ids = [entity_id for _ in range(default_nb)]
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_id_collection, default_binary_entities, ids)
-
-    @pytest.mark.skip("check filed")
-    @pytest.mark.level(2)
-    def test_insert_with_invalid_field_type(self, connect, binary_collection, get_field_type):
-        field_type = get_field_type
-        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', field_type)
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_collection, tmp_entity)
-
-    @pytest.mark.skip("check field")
-    @pytest.mark.level(2)
-    def test_insert_with_invalid_field_vector_value(self, connect, binary_collection, get_field_vectors_value):
-        tmp_entity = copy.deepcopy(default_binary_entities)
-        src_vector = tmp_entity[-1]["values"]
-        src_vector[1] = get_field_vectors_value
-        with pytest.raises(Exception):
-            connect.bulk_insert(binary_collection, tmp_entity)
diff --git a/tests/python/test_create_collection.py b/tests/python/test_create_collection.py
index 10da49dd62656035a865c9ada2c7bd602283e2b0..68e00e8cf2d0a375c1bc2d929fc071fdd91e2bb2 100644
--- a/tests/python/test_create_collection.py
+++ b/tests/python/test_create_collection.py
@@ -97,7 +97,7 @@ class TestCreateCollection:
         expected: error raised
         '''
         # pdb.set_trace()
-        connect.bulk_insert(collection, default_entity)
+        connect.insert(collection, default_entity)
 
         with pytest.raises(Exception) as e:
             connect.create_collection(collection, default_fields)
@@ -108,7 +108,7 @@ class TestCreateCollection:
         method: insert vector and create collection
         expected: error raised
         '''
-        connect.bulk_insert(collection, default_entity)
+        connect.insert(collection, default_entity)
         connect.flush([collection])
         with pytest.raises(Exception) as e:
             connect.create_collection(collection, default_fields)
diff --git a/tests/python/test_get_collection_info.py b/tests/python/test_get_collection_info.py
index 1575d0f29bfdc2bd6b3da7b9b7d494cb94e3ad97..a36e4dceed5d9bfeaea19a6aaa66817dc78c44da 100644
--- a/tests/python/test_get_collection_info.py
+++ b/tests/python/test_get_collection_info.py
@@ -33,9 +33,9 @@ class TestInfoBase:
     )
     def get_simple_index(self, request, connect):
         logging.getLogger().info(request.param)
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
         return request.param
 
     """
@@ -88,7 +88,7 @@ class TestInfoBase:
     @pytest.mark.skip("no create Index")
     def test_get_collection_info_after_index_created(self, connect, collection, get_simple_index):
         connect.create_index(collection, default_float_vec_field_name, get_simple_index)
-        info = connect.get_index_info(collection, field_name)
+        info = connect.describe_index(collection, field_name)
         assert info == get_simple_index
         res = connect.get_collection_info(collection, default_float_vec_field_name)
         assert index["index_type"] == get_simple_index["index_type"]
@@ -161,7 +161,7 @@ class TestInfoBase:
         }
         connect.create_collection(collection_name, fields)
         entities = gen_entities_by_fields(fields["fields"], default_nb, vector_field["params"]["dim"])
-        res_ids = connect.bulk_insert(collection_name, entities)
+        res_ids = connect.insert(collection_name, entities)
         connect.flush([collection_name])
         res = connect.get_collection_info(collection_name)
         assert res['auto_id'] == True
@@ -186,7 +186,7 @@ class TestInfoBase:
        fields["segment_row_limit"] = get_segment_row_limit
        connect.create_collection(collection_name, fields)
        entities = gen_entities_by_fields(fields["fields"], default_nb, fields["fields"][-1]["params"]["dim"])
-       res_ids = connect.bulk_insert(collection_name, entities)
+       res_ids = connect.insert(collection_name, entities)
        connect.flush([collection_name])
        res = connect.get_collection_info(collection_name)
        assert res['auto_id'] == True
diff --git a/tests/python/test_index.py b/tests/python/test_index.py
index cad7695c9530e04202b575951b1c0f4c567ee4c7..115d9812366f3b5e989d748f3566b5e5cc84d150 100644
--- a/tests/python/test_index.py
+++ b/tests/python/test_index.py
@@ -26,9 +26,9 @@ class TestIndexBase:
     def get_simple_index(self, request, connect):
         import copy
         logging.getLogger().info(request.param)
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
+        #if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
         return copy.deepcopy(request.param)
 
     @pytest.fixture(
@@ -55,7 +55,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         connect.create_index(collection, field_name, get_simple_index)
 
     def test_create_index_on_field_not_existed(self, connect, collection, get_simple_index):
@@ -65,7 +65,7 @@ class TestIndexBase:
         expected: error raised
         '''
         tmp_field_name = gen_unique_str()
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         with pytest.raises(Exception) as e:
             connect.create_index(collection, tmp_field_name, get_simple_index)
 
@@ -77,7 +77,7 @@ class TestIndexBase:
         expected: error raised
         '''
         tmp_field_name = "int64"
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         with pytest.raises(Exception) as e:
             connect.create_index(collection, tmp_field_name, get_simple_index)
 
@@ -98,7 +98,7 @@ class TestIndexBase:
         expected: return search success
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         connect.flush([collection])
         connect.create_index(collection, field_name, get_simple_index)
 
@@ -110,7 +110,7 @@ class TestIndexBase:
         expected: return search success
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         connect.flush()
         connect.create_index(collection, field_name, get_simple_index)
 
@@ -131,7 +131,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         connect.create_index(collection, field_name, get_simple_index)
         # logging.getLogger().info(connect.get_collection_stats(collection))
         nq = get_nq
@@ -150,7 +150,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        connect.bulk_insert(collection, default_entities)
+        connect.insert(collection, default_entities)
 
         def build(connect):
             connect.create_index(collection, field_name, default_index)
@@ -187,7 +187,7 @@ class TestIndexBase:
         expected: create index ok, and count correct
         '''
         connect.create_index(collection, field_name, get_simple_index)
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         connect.flush([collection])
         count = connect.count_entities(collection)
         assert count == default_nb
@@ -213,7 +213,7 @@ class TestIndexBase:
         method: create another index with different index_params after index have been built
         expected: return code 0, and describe index result equals with the second index params
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         indexs = [default_index, {"metric_type":"L2", "index_type": "FLAT", "params":{"nlist": 1024}}]
         for index in indexs:
             connect.create_index(collection, field_name, index)
@@ -228,7 +228,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         get_simple_index["metric_type"] = "IP"
         connect.create_index(collection, field_name, get_simple_index)
 
@@ -250,7 +250,7 @@ class TestIndexBase:
         expected: return search success
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         connect.flush([collection])
         get_simple_index["metric_type"] = "IP"
         connect.create_index(collection, field_name, get_simple_index)
@@ -263,7 +263,7 @@ class TestIndexBase:
         expected: return search success
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         connect.flush()
         get_simple_index["metric_type"] = "IP"
         connect.create_index(collection, field_name, get_simple_index)
@@ -277,7 +277,7 @@ class TestIndexBase:
         expected: return search success
         '''
         metric_type = "IP"
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         get_simple_index["metric_type"] = metric_type
         connect.create_index(collection, field_name, get_simple_index)
         # logging.getLogger().info(connect.get_collection_stats(collection))
@@ -297,7 +297,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        connect.bulk_insert(collection, default_entities)
+        connect.insert(collection, default_entities)
 
         def build(connect):
             default_index["metric_type"] = "IP"
@@ -336,7 +336,7 @@ class TestIndexBase:
         '''
         default_index["metric_type"] = "IP"
         connect.create_index(collection, field_name, get_simple_index)
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         connect.flush([collection])
         count = connect.count_entities(collection)
         assert count == default_nb
@@ -364,7 +364,7 @@ class TestIndexBase:
         method: create another index with different index_params after index have been built
         expected: return code 0, and describe index result equals with the second index params
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         indexs = [default_index, {"index_type": "FLAT", "params": {"nlist": 1024}, "metric_type": "IP"}]
         for index in indexs:
             connect.create_index(collection, field_name, index)
@@ -385,7 +385,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index, call drop index
         expected: return code 0, and default index param
         '''
-        # ids = connect.bulk_insert(collection, entities)
+        # ids = connect.insert(collection, entities)
         connect.create_index(collection, field_name, get_simple_index)
         connect.drop_index(collection, field_name)
         stats = connect.get_collection_stats(collection)
@@ -439,7 +439,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return code not equals to 0, drop index failed
         '''
-        # ids = connect.bulk_insert(collection, entities)
+        # ids = connect.insert(collection, entities)
         # no create index
         connect.drop_index(collection, field_name)
 
@@ -462,7 +462,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index, call drop index
         expected: return code 0, and default index param
         '''
-        # ids = connect.bulk_insert(collection, entities)
+        # ids = connect.insert(collection, entities)
         get_simple_index["metric_type"] = "IP"
         connect.create_index(collection, field_name, get_simple_index)
         connect.drop_index(collection, field_name)
@@ -506,7 +506,7 @@ class TestIndexBase:
         method: create collection and add entities in it, create index
         expected: return code not equals to 0, drop index failed
         '''
-        # ids = connect.bulk_insert(collection, entities)
+        # ids = connect.insert(collection, entities)
         # no create index
         connect.drop_index(collection, field_name)
 
@@ -579,7 +579,7 @@ class TestIndexBinary:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
+        ids = connect.insert(binary_collection, default_binary_entities)
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
 
     @pytest.mark.timeout(BUILD_TIMEOUT)
@@ -590,7 +590,7 @@ class TestIndexBinary:
         expected: return search success
         '''
         connect.create_partition(binary_collection, default_tag)
-        ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
+        ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
 
     @pytest.mark.skip("r0.3-test")
@@ -602,7 +602,7 @@ class TestIndexBinary:
         expected: return search success
         '''
         nq = get_nq
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
+        ids = connect.insert(binary_collection, default_binary_entities)
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, nq, metric_type="JACCARD")
         search_param = get_search_param(get_jaccard_index["index_type"], metric_type="JACCARD")
@@ -619,7 +619,7 @@ class TestIndexBinary:
         expected: return create_index failure
         '''
         # insert 6000 vectors
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
+        ids = connect.insert(binary_collection, default_binary_entities)
         connect.flush([binary_collection])
 
         if get_l2_index["index_type"] == "BIN_FLAT":
@@ -641,7 +641,7 @@ class TestIndexBinary:
         method: create collection and add entities in it, create index, call describe index
         expected: return code 0, and index instructure
         '''
-        ids = connect.bulk_insert(binary_collection, default_binary_entities)
+        ids = connect.insert(binary_collection, default_binary_entities)
         connect.flush([binary_collection])
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         stats = connect.get_collection_stats(binary_collection)
@@ -662,7 +662,7 @@ class TestIndexBinary:
         expected: return code 0, and index instructure
         '''
         connect.create_partition(binary_collection, default_tag)
-        ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
+        ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
         connect.flush([binary_collection])
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         stats = connect.get_collection_stats(binary_collection)
@@ -706,7 +706,7 @@ class TestIndexBinary:
         expected: return code 0, and default index param
         '''
         connect.create_partition(binary_collection, default_tag)
-        ids = connect.bulk_insert(binary_collection, default_binary_entities, partition_tag=default_tag)
+        ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
         connect.flush([binary_collection])
         connect.create_index(binary_collection, binary_field_name, get_jaccard_index)
         stats = connect.get_collection_stats(binary_collection)
@@ -802,7 +802,7 @@ class TestIndexAsync:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         logging.getLogger().info("start index")
         future = connect.create_index(collection, field_name, get_simple_index, _async=True)
         logging.getLogger().info("before result")
@@ -817,7 +817,7 @@ class TestIndexAsync:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         logging.getLogger().info("start index")
         future = connect.create_index(collection, field_name, get_simple_index, _async=True)
         logging.getLogger().info("DROP")
@@ -837,7 +837,7 @@ class TestIndexAsync:
         method: create collection and add entities in it, create index
         expected: return search success
         '''
-        ids = connect.bulk_insert(collection, default_entities)
+        ids = connect.insert(collection, default_entities)
         logging.getLogger().info("start index")
         future = connect.create_index(collection, field_name, get_simple_index, _async=True,
                                       _callback=self.check_result)
diff --git a/tests/python/test_insert.py b/tests/python/test_insert.py
index 22093708b6b6077d3a54fb5b9b13a4e99242bac2..8026a1fa5a616a8f0105697cd1913ea221c9d691 100644
--- a/tests/python/test_insert.py
+++ b/tests/python/test_insert.py
@@ -28,9 +28,9 @@ class TestInsertBase:
         params=gen_simple_index()
     )
     def get_simple_index(self, request, connect):
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("CPU not support index_type: ivf_sq8h")
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("CPU not support index_type: ivf_sq8h")
         return request.param
 
     @pytest.fixture(
@@ -76,7 +76,7 @@ class TestInsertBase:
         '''
         collection_name = gen_unique_str(uid)
         with pytest.raises(Exception) as e:
-            connect.insert(collection_name, default_entities_rows)
+            connect.insert(collection_name, default_entities)
 
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_drop_collection(self, connect, collection):
@@ -85,7 +85,7 @@ class TestInsertBase:
         method: insert vector and delete collection
         expected: no error raised
         '''
-        ids = connect.insert(collection, default_entity_row)
+        ids = connect.insert(collection, default_entity)
         assert len(ids) == 1
         connect.drop_collection(collection)
 
@@ -96,7 +96,7 @@ class TestInsertBase:
         method: insert vector, sleep, and delete collection
         expected: no error raised 
         '''
-        ids = connect.insert(collection, default_entity_row)
+        ids = connect.insert(collection, default_entity)
         assert len(ids) == 1
         connect.flush([collection])
         connect.drop_collection(collection)
@@ -108,14 +108,28 @@ class TestInsertBase:
         method: insert vector and build index
         expected: no error raised
         '''
-        ids = connect.insert(collection, default_entities_rows)
+        ids = connect.insert(collection, default_entities)
         assert len(ids) == default_nb
         connect.flush([collection])
         connect.create_index(collection, field_name, get_simple_index)
-        info = connect.get_index_info(collection, field_name)
+        info = connect.describe_index(collection, field_name)
         assert info == get_simple_index
 
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_create_index_new(self, connect, collection, get_simple_index):
+        '''
+        target: test build index after inserting vectors
+        method: insert vector and build index
+        expected: no error raised
+        '''
+        ids = connect.insert(collection, default_entities_new)
+        assert len(ids) == default_nb
+        connect.flush([collection])
+        connect.create_index(collection, field_name, get_simple_index)
+        info = connect.describe_index(collection, field_name)
+        assert info == get_simple_index
 
+    @pytest.mark.skip("r0.3-test")
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_after_create_index(self, connect, collection, get_simple_index):
         '''
@@ -124,9 +138,9 @@ class TestInsertBase:
         expected: no error raised
         '''
         connect.create_index(collection, field_name, get_simple_index)
-        ids = connect.insert(collection, default_entities_rows)
+        ids = connect.insert(collection, default_entities)
         assert len(ids) == default_nb
-        info = connect.get_index_info(collection, field_name)
+        info = connect.describe_index(collection, field_name)
         assert info == get_simple_index
 
     @pytest.mark.skip("r0.3-test")
@@ -137,16 +151,16 @@ class TestInsertBase:
         method: insert vector, sleep, and search collection
         expected: no error raised 
         '''
-        ids = connect.insert(collection, default_entities_rows)
+        ids = connect.insert(collection, default_entities)
         connect.flush([collection])
         res = connect.search(collection, default_single_query)
         logging.getLogger().debug(res)
         assert res
-
+
     @pytest.mark.skip("segment row count")
     def test_insert_segment_row_count(self, connect, collection):
         nb = default_segment_row_limit + 1
-        res_ids = connect.insert(collection, gen_entities_rows(nb))
+        res_ids = connect.insert(collection, gen_entities(nb))
         connect.flush([collection])
         assert len(res_ids) == nb
         stats = connect.get_collection_stats(collection)
@@ -164,26 +178,138 @@ class TestInsertBase:
     def insert_count(self, request):
         yield request.param
 
+    @pytest.mark.skip("todo: support count entities")
     @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_ids_not_match(self, connect, id_collection, insert_count):
+    def test_insert_ids(self, connect, id_collection, insert_count):
         '''
         target: test insert vectors in collection, use customize ids
         method: create collection and insert vectors in it, check the ids returned and the collection length after vectors inserted
         expected: the length of ids and the collection row count
         '''
         nb = insert_count
-        with pytest.raises(Exception) as e:
-            res_ids = connect.insert(id_collection, gen_entities_rows(nb))
+        ids = [i for i in range(nb)]
+        res_ids = connect.insert(id_collection, gen_entities(nb), ids)
+        connect.flush([id_collection])
+        assert len(res_ids) == nb
+        assert res_ids == ids
+        res_count = connect.count_entities(id_collection)
+        assert res_count == nb
+
+    @pytest.mark.skip("todo: support count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_the_same_ids(self, connect, id_collection, insert_count):
+        '''
+        target: test insert vectors in collection, use customize the same ids
+        method: create collection and insert vectors in it, check the ids returned and the collection length after vectors inserted
+        expected: the length of ids and the collection row count
+        '''
+        nb = insert_count
+        ids = [1 for i in range(nb)]
+        res_ids = connect.insert(id_collection, gen_entities(nb), ids)
+        connect.flush([id_collection])
+        assert len(res_ids) == nb
+        assert res_ids == ids
+        res_count = connect.count_entities(id_collection)
+        assert res_count == nb
 
+    @pytest.mark.skip("todo: support count entities")
     @pytest.mark.timeout(ADD_TIMEOUT)
-    def test_insert_twice_ids_no_ids(self, connect, collection):
+    def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
+        '''
+        target: test create normal collection with different fields, insert entities into id with ids
+        method: create collection with diff fields: metric/field_type/..., insert, and count
+        expected: row count correct
+        '''
+        nb = 5
+        filter_field = get_filter_field
+        vector_field = get_vector_field
+        collection_name = gen_unique_str("test_collection")
+        fields = {
+            "fields": [filter_field, vector_field],
+            "segment_row_limit": default_segment_row_limit,
+            "auto_id": True
+        }
+        connect.create_collection(collection_name, fields)
+        ids = [i for i in range(nb)]
+        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
+        res_ids = connect.insert(collection_name, entities, ids)
+        assert res_ids == ids
+        connect.flush([collection_name])
+        res_count = connect.count_entities(collection_name)
+        assert res_count == nb
+
+    # TODO: assert exception && enable
+    @pytest.mark.level(2)
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_twice_ids_no_ids(self, connect, id_collection):
         '''
         target: check the result of insert, with params ids and no ids
         method: test insert vectors twice, use customize ids first, and then use no ids
         expected:  error raised
         '''
+        ids = [i for i in range(default_nb)]
+        res_ids = connect.insert(id_collection, default_entities, ids)
         with pytest.raises(Exception) as e:
-            res_ids = connect.insert(collection, gen_entities_rows(default_nb, _id=False))
+            res_ids_new = connect.insert(id_collection, default_entities)
+
+    # TODO: assert exception && enable
+    @pytest.mark.level(2)
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_twice_not_ids_ids(self, connect, id_collection):
+        '''
+        target: check the result of insert, with params ids and no ids
+        method: test insert vectors twice, use no ids first, and then use customize ids
+        expected:  error raised
+        '''
+        with pytest.raises(Exception) as e:
+            res_ids = connect.insert(id_collection, default_entities)
+
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_ids_length_not_match_batch(self, connect, id_collection):
+        '''
+        target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
+        method: create collection and insert vectors in it
+        expected: raise an exception
+        '''
+        ids = [i for i in range(1, default_nb)]
+        logging.getLogger().info(len(ids))
+        with pytest.raises(Exception) as e:
+            res_ids = connect.insert(id_collection, default_entities, ids)
+
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_ids_length_not_match_single(self, connect, collection):
+        '''
+        target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
+        method: create collection and insert vectors in it
+        expected: raise an exception
+        '''
+        ids = [i for i in range(1, default_nb)]
+        logging.getLogger().info(len(ids))
+        with pytest.raises(Exception) as e:
+            res_ids = connect.insert(collection, default_entity, ids)
+
+    @pytest.mark.skip("todo: support count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_not_ids_fields(self, connect, get_filter_field, get_vector_field):
+        '''
+        target: test create normal collection with different fields, insert entities into id without ids
+        method: create collection with diff fields: metric/field_type/..., insert, and count
+        expected: row count correct
+        '''
+        nb = 5
+        filter_field = get_filter_field
+        vector_field = get_vector_field
+        collection_name = gen_unique_str("test_collection")
+        fields = {
+            "fields": [filter_field, vector_field],
+            "segment_row_limit": default_segment_row_limit
+        }
+        connect.create_collection(collection_name, fields)
+        entities = gen_entities_by_fields(fields["fields"], nb, default_dim)
+        res_ids = connect.insert(collection_name, entities)
+        connect.flush([collection_name])
+        res_count = connect.count_entities(collection_name)
+        assert res_count == nb
 
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_tag(self, connect, collection):
@@ -193,7 +319,7 @@ class TestInsertBase:
         expected: the collection row count equals to nq
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.insert(collection, default_entities_rows, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         assert len(ids) == default_nb
         assert connect.has_partition(collection, default_tag)
 
@@ -206,9 +332,25 @@ class TestInsertBase:
         '''
         connect.create_partition(id_collection, default_tag)
         ids = [i for i in range(default_nb)]
-        res_ids = connect.insert(id_collection, gen_entities_rows(default_nb, _id=False), partition_tag=default_tag)
+        res_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
         assert res_ids == ids
 
+
+    @pytest.mark.skip("todo: support count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_default_tag(self, connect, collection):
+        '''
+        target: test insert entities into default partition
+        method: create partition and insert into collection without tag params
+        expected: the collection row count equals to nb
+        '''
+        connect.create_partition(collection, default_tag)
+        ids = connect.insert(collection, default_entities)
+        connect.flush([collection])
+        assert len(ids) == default_nb
+        res_count = connect.count_entities(collection)
+        assert res_count == default_nb
+
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_tag_not_existed(self, connect, collection):
         '''
@@ -218,9 +360,9 @@ class TestInsertBase:
         '''
         tag = gen_unique_str()
         with pytest.raises(Exception) as e:
-            ids = connect.insert(collection, default_entities_rows, partition_tag=tag)
+            ids = connect.insert(collection, default_entities, partition_tag=tag)
 
-    @pytest.mark.skip("todo support count entites")
+    @pytest.mark.skip("count entities not supported")
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_tag_existed(self, connect, collection):
         '''
@@ -229,13 +371,22 @@ class TestInsertBase:
         expected: the collection row count equals to nq
         '''
         connect.create_partition(collection, default_tag)
-        ids = connect.insert(collection, default_entities_rows, partition_tag=default_tag)
-        ids = connect.insert(collection, default_entities_rows, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
         connect.flush([collection])
         res_count = connect.count_entities(collection)
         assert res_count == 2 * default_nb
 
     @pytest.mark.level(2)
+    def test_insert_without_connect(self, dis_connect, collection):
+        '''
+        target: test insert entities without connection
+        method: create collection and insert entities in it, check if inserted successfully
+        expected: raise exception
+        '''
+        with pytest.raises(Exception) as e:
+            ids = dis_connect.insert(collection, default_entities)
+
     def test_insert_collection_not_existed(self, connect):
         '''
         target: test insert entities in collection, which not existed before
@@ -243,9 +394,9 @@ class TestInsertBase:
         expected: error raised
         '''
         with pytest.raises(Exception) as e:
-            ids = connect.insert(gen_unique_str("not_exist_collection"), default_entities_rows)
+            ids = connect.insert(gen_unique_str("not_exist_collection"), default_entities)
 
-    @pytest.mark.skip("todo support row data check")
+    @pytest.mark.skip("todo: add dim check")
     def test_insert_dim_not_matched(self, connect, collection):
         '''
         target: test insert entities, the vector dimension is not equal to the collection dimension
@@ -253,12 +404,177 @@ class TestInsertBase:
         expected: error raised
         '''
         vectors = gen_vectors(default_nb, int(default_dim) // 2)
-        insert_entities = copy.deepcopy(default_entities_rows)
-        insert_entities[-1][default_float_vec_field_name] = vectors
+        insert_entities = copy.deepcopy(default_entities)
+        insert_entities[-1]["values"] = vectors
         with pytest.raises(Exception) as e:
             ids = connect.insert(collection, insert_entities)
 
 
+    def test_insert_with_field_name_not_match(self, connect, collection):
+        '''
+        target: test insert entities, with the entity field name updated
+        method: update entity field name
+        expected: error raised
+        '''
+        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", "int64new")
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    @pytest.mark.skip("todo: support type check")
+    def test_insert_with_field_type_not_match(self, connect, collection):
+        '''
+        target: test insert entities, with the entity field type updated
+        method: update entity field type
+        expected: error raised
+        '''
+        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.FLOAT)
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    @pytest.mark.skip("todo: add field_type check")
+    @pytest.mark.level(2)
+    def test_insert_with_field_type_not_match_B(self, connect, collection):
+        '''
+        target: test insert entities, with the entity field type updated
+        method: update entity field type
+        expected: error raised
+        '''
+        tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.DOUBLE)
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    @pytest.mark.level(2)
+    def test_insert_with_field_value_not_match(self, connect, collection):
+        '''
+        target: test insert entities, with the entity field value updated
+        method: update entity field value
+        expected: error raised
+        '''
+        tmp_entity = update_field_value(copy.deepcopy(default_entity), DataType.FLOAT, 's')
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_field_more(self, connect, collection):
+        '''
+        target: test insert entities, with more fields than collection schema
+        method: add entity field
+        expected: error raised
+        '''
+        tmp_entity = add_field(copy.deepcopy(default_entity))
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_field_vector_more(self, connect, collection):
+        '''
+        target: test insert entities, with more fields than collection schema
+        method: add entity vector field
+        expected: error raised
+        '''
+        tmp_entity = add_vector_field(default_nb, default_dim)
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_field_less(self, connect, collection):
+        '''
+        target: test insert entities, with less fields than collection schema
+        method: remove entity field
+        expected: error raised
+        '''
+        tmp_entity = remove_field(copy.deepcopy(default_entity))
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_field_vector_less(self, connect, collection):
+        '''
+        target: test insert entities, with less fields than collection schema
+        method: remove entity vector field
+        expected: error raised
+        '''
+        tmp_entity = remove_vector_field(copy.deepcopy(default_entity))
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_no_field_vector_value(self, connect, collection):
+        '''
+        target: test insert entities, with no vector field value
+        method: remove entity vector field
+        expected: error raised
+        '''
+        tmp_entity = copy.deepcopy(default_entity)
+        del tmp_entity[-1]["values"]
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_no_field_vector_type(self, connect, collection):
+        '''
+        target: test insert entities, with no vector field type
+        method: remove entity vector field
+        expected: error raised
+        '''
+        tmp_entity = copy.deepcopy(default_entity)
+        del tmp_entity[-1]["type"]
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    def test_insert_with_no_field_vector_name(self, connect, collection):
+        '''
+        target: test insert entities, with no vector field name
+        method: remove entity vector field
+        expected: error raised
+        '''
+        tmp_entity = copy.deepcopy(default_entity)
+        del tmp_entity[-1]["name"]
+        with pytest.raises(Exception):
+            connect.insert(collection, tmp_entity)
+
+    @pytest.mark.skip("todo: support count entities")
+    @pytest.mark.level(2)
+    @pytest.mark.timeout(30)
+    def test_collection_insert_rows_count_multi_threading(self, args, collection):
+        '''
+        target: test collection rows_count is correct or not with multi threading
+        method: create collection and insert entities in it(idmap),
+            assert the value returned by count_entities method is equal to length of entities
+        expected: the count is equal to the length of entities
+        '''
+        if args["handler"] == "HTTP":
+            pytest.skip("Skip test in http mode")
+        thread_num = 8
+        threads = []
+        milvus = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"], try_connect=False)
+
+        def insert(thread_i):
+            logging.getLogger().info("In thread-%d" % thread_i)
+            milvus.insert(collection, default_entities)
+            milvus.flush([collection])
+
+        for i in range(thread_num):
+            t = MilvusTestThread(target=insert, args=(i,))
+            threads.append(t)
+            t.start()
+        for t in threads:
+            t.join()
+        res_count = milvus.count_entities(collection)
+        assert res_count == thread_num * default_nb
+
+    # TODO: unable to set config
+    @pytest.mark.skip("get entity by id")
+    @pytest.mark.level(2)
+    def _test_insert_disable_auto_flush(self, connect, collection):
+        '''
+        target: test insert entities, with disable autoflush
+        method: disable autoflush and insert, get entity
+        expected: the count is equal to 0
+        '''
+        delete_nums = 500
+        disable_flush(connect)
+        ids = connect.insert(collection, default_entities)
+        res = connect.get_entity_by_id(collection, ids[:delete_nums])
+        assert len(res) == delete_nums
+        assert res[0] is None
+
+
+
 class TestInsertBinary:
     @pytest.fixture(
         scope="function",
@@ -275,11 +591,24 @@ class TestInsertBinary:
         method: create collection and insert binary entities in it
         expected: the collection row count equals to nb
         '''
-        ids = connect.insert(binary_collection, default_binary_entities_rows)
+        ids = connect.insert(binary_collection, default_binary_entities)
         assert len(ids) == default_nb
         connect.flush()
         assert connect.count_entities(binary_collection) == default_nb
 
+    @pytest.mark.skip("count entities")
+    def test_insert_binary_entities_new(self, connect, binary_collection):
+        '''
+        target: test insert entities in binary collection
+        method: create collection and insert binary entities in it
+        expected: the collection row count equals to nb
+        '''
+        ids = connect.insert(binary_collection, default_binary_entities_new)
+        assert len(ids) == default_nb
+        connect.flush()
+        assert connect.count_entities(binary_collection) == default_nb
+
+    # @pytest.mark.skip
     def test_insert_binary_tag(self, connect, binary_collection):
         '''
         target: test insert entities and create partition tag
@@ -287,11 +616,10 @@ class TestInsertBinary:
         expected: the collection row count equals to nb
         '''
         connect.create_partition(binary_collection, default_tag)
-        ids = connect.insert(binary_collection, default_binary_entities_rows, partition_tag=default_tag)
+        ids = connect.insert(binary_collection, default_binary_entities, partition_tag=default_tag)
         assert len(ids) == default_nb
         assert connect.has_partition(binary_collection, default_tag)
 
-    # TODO
     @pytest.mark.skip("count entities")
     @pytest.mark.level(2)
     def test_insert_binary_multi_times(self, connect, binary_collection):
@@ -301,7 +629,7 @@ class TestInsertBinary:
         expected: the collection row count equals to nb
         '''
         for i in range(default_nb):
-            ids = connect.insert(binary_collection, default_binary_entity_row)
+            ids = connect.insert(binary_collection, default_binary_entity)
             assert len(ids) == 1
         connect.flush([binary_collection])
         assert connect.count_entities(binary_collection) == default_nb
@@ -313,13 +641,12 @@ class TestInsertBinary:
         expected: no error raised
         '''
         connect.create_index(binary_collection, binary_field_name, get_binary_index)
-        ids = connect.insert(binary_collection, default_binary_entities_rows)
+        ids = connect.insert(binary_collection, default_binary_entities)
         assert len(ids) == default_nb
         connect.flush([binary_collection])
-        info = connect.get_index_info(binary_collection, binary_field_name)
+        info = connect.describe_index(binary_collection, binary_field_name)
         assert info == get_binary_index
 
-
     @pytest.mark.skip("r0.3-test")
     @pytest.mark.timeout(ADD_TIMEOUT)
     def test_insert_binary_create_index(self, connect, binary_collection, get_binary_index):
@@ -328,13 +655,285 @@ class TestInsertBinary:
         method: insert vector and build index
         expected: no error raised
         '''
-        ids = connect.insert(binary_collection, default_binary_entities_rows)
+        ids = connect.insert(binary_collection, default_binary_entities)
         assert len(ids) == default_nb
         connect.flush([binary_collection])
         connect.create_index(binary_collection, binary_field_name, get_binary_index)
-        info = connect.get_index_info(binary_collection, binary_field_name)
+        info = connect.describe_index(binary_collection, binary_field_name)
         assert info == get_binary_index
 
+    @pytest.mark.skip("binary search")
+    def test_insert_binary_search(self, connect, binary_collection):
+        '''
+        target: test search vector after insert vector after a while
+        method: insert vector, sleep, and search collection
+        expected: no error raised
+        '''
+        ids = connect.insert(binary_collection, default_binary_entities)
+        connect.flush([binary_collection])
+        query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, 1, metric_type="JACCARD")
+        res = connect.search(binary_collection, query)
+        logging.getLogger().debug(res)
+        assert res
+
+
+class TestInsertAsync:
+    @pytest.fixture(scope="function", autouse=True)
+    def skip_http_check(self, args):
+        if args["handler"] == "HTTP":
+            pytest.skip("skip in http mode")
+
+    @pytest.fixture(
+        scope="function",
+        params=[
+            1,
+            1000
+        ],
+    )
+    def insert_count(self, request):
+        yield request.param
+
+    def check_status(self, result):
+        logging.getLogger().info("In callback check status")
+        assert not result
+
+    def check_result(self, result):
+        logging.getLogger().info("In callback check result")
+        assert result
+
+    def test_insert_async(self, connect, collection, insert_count):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: length of ids is equal to the length of vectors
+        '''
+        nb = insert_count
+        future = connect.insert(collection, gen_entities(nb), _async=True)
+        ids = future.result()
+        connect.flush([collection])
+        assert len(ids) == nb
+
+    @pytest.mark.level(2)
+    def test_insert_async_false(self, connect, collection, insert_count):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: length of ids is equal to the length of vectors
+        '''
+        nb = insert_count
+        ids = connect.insert(collection, gen_entities(nb), _async=False)
+        # ids = future.result()
+        connect.flush([collection])
+        assert len(ids) == nb
+
+    def test_insert_async_callback(self, connect, collection, insert_count):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: length of ids is equal to the length of vectors
+        '''
+        nb = insert_count
+        future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
+        future.done()
+        ids = future.result()
+        assert len(ids) == nb
+
+
+    @pytest.mark.skip("count entities")
+    @pytest.mark.level(2)
+    def test_insert_async_long(self, connect, collection):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: length of ids is equal to the length of vectors
+        '''
+        nb = 50000
+        future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
+        result = future.result()
+        assert len(result) == nb
+        connect.flush([collection])
+        count = connect.count_entities(collection)
+        logging.getLogger().info(count)
+        assert count == nb
+
+    @pytest.mark.skip("count entities")
+    @pytest.mark.level(2)
+    def test_insert_async_callback_timeout(self, connect, collection):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: length of ids is equal to the length of vectors
+        '''
+        nb = 100000
+        future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_status, timeout=1)
+        with pytest.raises(Exception) as e:
+            result = future.result()
+        count = connect.count_entities(collection)
+        assert count == 0
+
+    def test_insert_async_invalid_params(self, connect):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: raise exception
+        '''
+        collection_new = gen_unique_str()
+        with pytest.raises(Exception) as e:
+            future = connect.insert(collection_new, default_entities, _async=True)
+            result = future.result()
+
+    def test_insert_async_invalid_params_raise_exception(self, connect, collection):
+        '''
+        target: test insert vectors with different length of vectors
+        method: set different vectors as insert method params
+        expected: raise exception
+        '''
+        entities = []
+        with pytest.raises(Exception) as e:
+            future = connect.insert(collection, entities, _async=True)
+            future.result()
+
+
+class TestInsertMultiCollections:
+    """
+    ******************************************************************
+      The following cases are used to test `insert` function
+    ******************************************************************
+    """
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_simple_index()
+    )
+    def get_simple_index(self, request, connect):
+        logging.getLogger().info(request.param)
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
+        return request.param
+
+    @pytest.mark.skip("count entities")
+    def test_insert_vector_multi_collections(self, connect):
+        '''
+        target: test insert entities
+        method: create 10 collections and insert entities into them in turn
+        expected: row count
+        '''
+        collection_num = 10
+        collection_list = []
+        for i in range(collection_num):
+            collection_name = gen_unique_str(uid)
+            collection_list.append(collection_name)
+            connect.create_collection(collection_name, default_fields)
+            ids = connect.insert(collection_name, default_entities)
+            connect.flush([collection_name])
+            assert len(ids) == default_nb
+            count = connect.count_entities(collection_name)
+            assert count == default_nb
+
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_drop_collection_insert_vector_another(self, connect, collection):
+        '''
+        target: test insert vector to collection_1 after collection_2 deleted
+        method: delete collection_2 and insert vector to collection_1
+        expected: row count equals the length of entities inserted
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        connect.drop_collection(collection)
+        ids = connect.insert(collection_name, default_entity)
+        connect.flush([collection_name])
+        assert len(ids) == 1
+
+    @pytest.mark.skip("r0.3-test")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_create_index_insert_vector_another(self, connect, collection, get_simple_index):
+        '''
+        target: test insert vector to collection_2 after build index for collection_1
+        method: build index and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        connect.create_index(collection, field_name, get_simple_index)
+        ids = connect.insert(collection, default_entity)
+        connect.drop_collection(collection_name)
+
+    @pytest.mark.skip("count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_vector_create_index_another(self, connect, collection, get_simple_index):
+        '''
+        target: test insert vector to collection_2 after build index for collection_1
+        method: build index and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        ids = connect.insert(collection, default_entity)
+        connect.create_index(collection, field_name, get_simple_index)
+        count = connect.count_entities(collection_name)
+        assert count == 0
+
+    @pytest.mark.skip("count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_vector_sleep_create_index_another(self, connect, collection, get_simple_index):
+        '''
+        target: test insert vector to collection_2 after build index for collection_1 for a while
+        method: build index and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        ids = connect.insert(collection, default_entity)
+        connect.flush([collection])
+        connect.create_index(collection, field_name, get_simple_index)
+        count = connect.count_entities(collection)
+        assert count == 1
+
+    @pytest.mark.skip("count entities")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_search_vector_insert_vector_another(self, connect, collection):
+        '''
+        target: test insert vector to collection_1 after search collection_2
+        method: search collection and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        res = connect.search(collection, default_single_query)
+        logging.getLogger().debug(res)
+        ids = connect.insert(collection_name, default_entity)
+        connect.flush()
+        count = connect.count_entities(collection_name)
+        assert count == 1
+
+    @pytest.mark.skip("r0.3-test")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_vector_search_vector_another(self, connect, collection):
+        '''
+        target: test insert vector to collection_1 after search collection_2
+        method: search collection and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        ids = connect.insert(collection, default_entity)
+        result = connect.search(collection_name, default_single_query)
+
+    @pytest.mark.skip("r0.3-test")
+    @pytest.mark.timeout(ADD_TIMEOUT)
+    def test_insert_vector_sleep_search_vector_another(self, connect, collection):
+        '''
+        target: test insert vector to collection_1 after search collection_2 a while
+        method: search collection , sleep, and insert vector
+        expected: status ok
+        '''
+        collection_name = gen_unique_str(uid)
+        connect.create_collection(collection_name, default_fields)
+        ids = connect.insert(collection, default_entity)
+        connect.flush([collection])
+        result = connect.search(collection_name, default_single_query)
+
 
 class TestInsertInvalid(object):
     """
@@ -390,60 +989,160 @@ class TestInsertInvalid(object):
     def get_field_vectors_value(self, request):
         yield request.param
 
-    @pytest.mark.skip("todo support row data check")
-    def test_insert_field_name_not_match(self, connect, collection):
+    def test_insert_ids_invalid(self, connect, id_collection, get_entity_id):
         '''
-        target: test insert, with field name not matched
+        target: test insert, with using customize ids, which are not int64
         method: create collection and insert entities in it
         expected: raise an exception
         '''
-        tmp_entity = copy.deepcopy(default_entity_row)
-        tmp_entity[0]["string"] = "string"
+        entity_id = get_entity_id
+        ids = [entity_id for _ in range(default_nb)]
         with pytest.raises(Exception):
-            connect.insert(collection, default_entity_row)
+            connect.insert(id_collection, default_entities, ids)
 
     def test_insert_with_invalid_collection_name(self, connect, get_collection_name):
         collection_name = get_collection_name
         with pytest.raises(Exception):
-            connect.insert(collection_name, default_entity_row)
+            connect.insert(collection_name, default_entity)
 
     def test_insert_with_invalid_tag_name(self, connect, collection, get_tag_name):
         tag_name = get_tag_name
         connect.create_partition(collection, default_tag)
         if tag_name is not None:
             with pytest.raises(Exception):
-                connect.insert(collection, default_entity_row, partition_tag=tag_name)
+                connect.insert(collection, default_entity, partition_tag=tag_name)
         else:
-            connect.insert(collection, default_entity_row, partition_tag=tag_name)
+            connect.insert(collection, default_entity, partition_tag=tag_name)
 
-    @pytest.mark.skip("todo support row data check")
-    def test_insert_with_less_field(self, connect, collection):
-        tmp_entity = copy.deepcopy(default_entity_row)
-        tmp_entity[0].pop(default_float_vec_field_name)
+    def test_insert_with_invalid_field_name(self, connect, collection, get_field_name):
+        field_name = get_field_name
+        tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", get_field_name)
         with pytest.raises(Exception):
-            connect.insert(collection, tmp_entity) 
+            connect.insert(collection, tmp_entity)
 
-    def test_insert_with_less_field_id(self, connect, id_collection):
-        tmp_entity = copy.deepcopy(gen_entities_rows(default_nb, _id=False))
-        tmp_entity[0].pop("_id")
+    @pytest.mark.skip("later add check of field type")
+    def test_insert_with_invalid_field_type(self, connect, collection, get_field_type):
+        field_type = get_field_type
+        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'float', field_type)
         with pytest.raises(Exception):
-            connect.insert(id_collection, tmp_entity)
+            connect.insert(collection, tmp_entity)
 
-    def test_insert_with_more_field(self, connect, collection):
-        tmp_entity = copy.deepcopy(default_entity_row)
-        tmp_entity[0]["new_field"] = 1
+    @pytest.mark.skip("later add check of field value")
+    def test_insert_with_invalid_field_value(self, connect, collection, get_field_int_value):
+        field_value = get_field_int_value
+        tmp_entity = update_field_type(copy.deepcopy(default_entity), 'int64', field_value)
         with pytest.raises(Exception):
-            connect.insert(collection, tmp_entity) 
+            connect.insert(collection, tmp_entity)
 
-    def test_insert_with_more_field_id(self, connect, collection):
-        tmp_entity = copy.deepcopy(default_entity_row)
-        tmp_entity[0]["_id"] = 1
+    def test_insert_with_invalid_field_vector_value(self, connect, collection, get_field_vectors_value):
+        tmp_entity = copy.deepcopy(default_entity)
+        src_vector = tmp_entity[-1]["values"]
+        src_vector[0][1] = get_field_vectors_value
         with pytest.raises(Exception):
             connect.insert(collection, tmp_entity)
 
+
+class TestInsertInvalidBinary(object):
+    """
+    Test inserting vectors with invalid collection names
+    """
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_strs()
+    )
+    def get_collection_name(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_strs()
+    )
+    def get_tag_name(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_strs()
+    )
+    def get_field_name(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_strs()
+    )
+    def get_field_type(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_strs()
+    )
+    def get_field_int_value(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_ints()
+    )
+    def get_entity_id(self, request):
+        yield request.param
+
+    @pytest.fixture(
+        scope="function",
+        params=gen_invalid_vectors()
+    )
+    def get_field_vectors_value(self, request):
+        yield request.param
+
+    @pytest.mark.level(2)
+    def test_insert_with_invalid_field_name(self, connect, binary_collection, get_field_name):
+        tmp_entity = update_field_name(copy.deepcopy(default_binary_entity), "int64", get_field_name)
+        with pytest.raises(Exception):
+            connect.insert(binary_collection, tmp_entity)
+
     @pytest.mark.skip("todo support row data check")
-    def test_insert_with_invalid_field_vector_value(self, connect, collection, get_field_vectors_value):
-        tmp_entity = copy.deepcopy(default_entity_row)
-        tmp_entity[0][default_float_vec_field_name][1] = get_field_vectors_value
+    @pytest.mark.level(2)
+    def test_insert_with_invalid_field_value(self, connect, binary_collection, get_field_int_value):
+        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', get_field_int_value)
         with pytest.raises(Exception):
-            connect.insert(collection, tmp_entity)
+            connect.insert(binary_collection, tmp_entity)
+
+    @pytest.mark.skip("todo support row data check")
+    @pytest.mark.level(2)
+    def test_insert_with_invalid_field_vector_value(self, connect, binary_collection, get_field_vectors_value):
+        tmp_entity = copy.deepcopy(default_binary_entity)
+        src_vector = tmp_entity[-1]["values"]
+        src_vector[0][1] = get_field_vectors_value
+        with pytest.raises(Exception):
+            connect.insert(binary_collection, tmp_entity)
+
+    @pytest.mark.level(2)
+    def test_insert_ids_invalid(self, connect, binary_id_collection, get_entity_id):
+        '''
+        target: test insert, with using customize ids, which are not int64
+        method: create collection and insert entities in it
+        expected: raise an exception
+        '''
+        entity_id = get_entity_id
+        ids = [entity_id for _ in range(default_nb)]
+        with pytest.raises(Exception):
+            connect.insert(binary_id_collection, default_binary_entities, ids)
+
+    @pytest.mark.skip("check field")
+    @pytest.mark.level(2)
+    def test_insert_with_invalid_field_type(self, connect, binary_collection, get_field_type):
+        field_type = get_field_type
+        tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', field_type)
+        with pytest.raises(Exception):
+            connect.insert(binary_collection, tmp_entity)
+
+    @pytest.mark.skip("check field")
+    @pytest.mark.level(2)
+    def test_insert_entities_with_invalid_field_vector_value(self, connect, binary_collection, get_field_vectors_value):
+        tmp_entity = copy.deepcopy(default_binary_entities)
+        src_vector = tmp_entity[-1]["values"]
+        src_vector[1] = get_field_vectors_value
+        with pytest.raises(Exception):
+            connect.insert(binary_collection, tmp_entity)
diff --git a/tests/python/test_partition.py b/tests/python/test_partition.py
index 5b21be5c476847c36ac2f3463fed963667f11f08..8a412dcdc2146006cd21d8f2ea4c81e827719235 100644
--- a/tests/python/test_partition.py
+++ b/tests/python/test_partition.py
@@ -101,7 +101,7 @@ class TestCreateBase:
         '''
         connect.create_partition(id_collection, default_tag)
         ids = [i for i in range(default_nb)]
-        insert_ids = connect.bulk_insert(id_collection, default_entities, ids)
+        insert_ids = connect.insert(id_collection, default_entities, ids)
         assert len(insert_ids) == len(ids)
  
     @pytest.mark.skip("not support custom id")
@@ -113,7 +113,7 @@ class TestCreateBase:
         '''
         connect.create_partition(id_collection, default_tag)
         ids = [i for i in range(default_nb)]
-        insert_ids = connect.bulk_insert(id_collection, default_entities, ids, partition_tag=default_tag)
+        insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
         assert len(insert_ids) == len(ids)
 
     def test_create_partition_insert_with_tag_not_existed(self, connect, collection):
@@ -126,7 +126,7 @@ class TestCreateBase:
         connect.create_partition(collection, default_tag)
         ids = [i for i in range(default_nb)]
         with pytest.raises(Exception) as e:
-            insert_ids = connect.bulk_insert(collection, default_entities, ids, partition_tag=tag_new)
+            insert_ids = connect.insert(collection, default_entities, ids, partition_tag=tag_new)
 
     @pytest.mark.skip("not support custom id")
     def test_create_partition_insert_same_tags(self, connect, id_collection):
@@ -137,9 +137,9 @@ class TestCreateBase:
         '''
         connect.create_partition(id_collection, default_tag)
         ids = [i for i in range(default_nb)]
-        insert_ids = connect.bulk_insert(id_collection, default_entities, ids, partition_tag=default_tag)
+        insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
         ids = [(i+default_nb) for i in range(default_nb)]
-        new_insert_ids = connect.bulk_insert(id_collection, default_entities, ids, partition_tag=default_tag)
+        new_insert_ids = connect.insert(id_collection, default_entities, ids, partition_tag=default_tag)
         connect.flush([id_collection])
         res = connect.count_entities(id_collection)
         assert res == default_nb * 2
@@ -156,8 +156,8 @@ class TestCreateBase:
         collection_new = gen_unique_str()
         connect.create_collection(collection_new, default_fields)
         connect.create_partition(collection_new, default_tag)
-        ids = connect.bulk_insert(collection, default_entities, partition_tag=default_tag)
-        ids = connect.bulk_insert(collection_new, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection, default_entities, partition_tag=default_tag)
+        ids = connect.insert(collection_new, default_entities, partition_tag=default_tag)
         connect.flush([collection, collection_new])
         res = connect.count_entities(collection)
         assert res == default_nb
diff --git a/tests/python/test_search.py b/tests/python/test_search.py
index 970fbb7d34829bf1b46135ac1f7716b9af53f758..605bd8507da33c1ce3cd67e13c06b6893b06c75e 100644
--- a/tests/python/test_search.py
+++ b/tests/python/test_search.py
@@ -36,14 +36,14 @@ def init_data(connect, collection, nb=1200, partition_tags=None, auto_id=True):
         insert_entities = gen_entities(nb, is_normal=True)
     if partition_tags is None:
         if auto_id:
-            ids = connect.bulk_insert(collection, insert_entities)
+            ids = connect.insert(collection, insert_entities)
         else:
-            ids = connect.bulk_insert(collection, insert_entities, ids=[i for i in range(nb)])
+            ids = connect.insert(collection, insert_entities, ids=[i for i in range(nb)])
     else:
         if auto_id:
-            ids = connect.bulk_insert(collection, insert_entities, partition_tag=partition_tags)
+            ids = connect.insert(collection, insert_entities, partition_tag=partition_tags)
         else:
-            ids = connect.bulk_insert(collection, insert_entities, ids=[i for i in range(nb)], partition_tag=partition_tags)
+            ids = connect.insert(collection, insert_entities, ids=[i for i in range(nb)], partition_tag=partition_tags)
     # connect.flush([collection])
     return insert_entities, ids
 
@@ -62,9 +62,9 @@ def init_binary_data(connect, collection, nb=1200, insert=True, partition_tags=N
         insert_raw_vectors, insert_entities = gen_binary_entities(nb)
     if insert is True:
         if partition_tags is None:
-            ids = connect.bulk_insert(collection, insert_entities)
+            ids = connect.insert(collection, insert_entities)
         else:
-            ids = connect.bulk_insert(collection, insert_entities, partition_tag=partition_tags)
+            ids = connect.insert(collection, insert_entities, partition_tag=partition_tags)
         connect.flush([collection])
     return insert_raw_vectors, insert_entities, ids
 
@@ -79,9 +79,9 @@ class TestSearchBase:
         params=gen_index()
     )
     def get_index(self, request, connect):
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
         return request.param
 
     @pytest.fixture(
@@ -90,9 +90,9 @@ class TestSearchBase:
     )
     def get_simple_index(self, request, connect):
         import copy
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
         return copy.deepcopy(request.param)
 
     @pytest.fixture(
@@ -1250,7 +1250,7 @@ class TestSearchDSL(object):
         collection_term = gen_unique_str("term")
         connect.create_collection(collection_term, term_fields)
         term_entities = add_field(entities, field_name="term")
-        ids = connect.bulk_insert(collection_term, term_entities)
+        ids = connect.insert(collection_term, term_entities)
         assert len(ids) == default_nb
         connect.flush([collection_term])
         count = connect.count_entities(collection_term) # count_entities is not impelmented
@@ -1695,9 +1695,9 @@ class TestSearchInvalid(object):
         params=gen_simple_index()
     )
     def get_simple_index(self, request, connect):
-        if str(connect._cmd("mode")) == "CPU":
-            if request.param["index_type"] in index_cpu_not_support():
-                pytest.skip("sq8h not support in CPU mode")
+        # if str(connect._cmd("mode")) == "CPU":
+        if request.param["index_type"] in index_cpu_not_support():
+            pytest.skip("sq8h not support in CPU mode")
         return request.param
 
     # PASS