Splitting a Python list into batches


# Scenario: pulling ~640,000 rows back from a MongoDB collection in a single query raises
# pymongo.errors.DocumentTooLarge: BSON document too large (28888095 bytes) -
# the connected server supports BSON document sizes up to 16777216 bytes. The request built from
# all the ids at once exceeds the 16 MB BSON limit, so the ids are split into 10 batches of roughly
# 64,000 each, and every batch's results are merged into a single local dict.

data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 3, 5, 6, 7, 8, 9, 6, 5, 4, 3, 8, 9]
batch_step = round(len(data) / 10)  # slice size that splits the list into roughly 10 batches

for index in range(0, len(data), batch_step):
    item_list = data[index:index + batch_step]  # one batch of at most batch_step items; process it here
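# A reusable variant (a minimal sketch, not from the original post): math.ceil plus max(1, ...)
# avoids a zero step when the list is shorter than the batch count, and the function name
# split_into_batches is made up for illustration.
import math

def split_into_batches(items, num_batches=10):
    # yield roughly equal slices so that at most num_batches passes are made over items
    step = max(1, math.ceil(len(items) / num_batches))
    for start in range(0, len(items), step):
        yield items[start:start + step]

# usage with the sample data above
for batch in split_into_batches(data):
    print(batch)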
    
# Example: fetching image metadata from MongoDB in batches
from pymongo import MongoClient

mdb = MongoClient('120.133.26.xxx:20002', username='xt', password='xxxxxx')
image_ids = ["001", "002", "003", ...]  # full list of image ids to look up

image_dict = {}
batch_step = round(len(image_ids) / 10)  # 10 batches keeps each $in filter well under the 16 MB limit
for idx in range(0, len(image_ids), batch_step):
    image_ids_part = image_ids[idx:idx + batch_step]
    # query only the ids in the current batch ('database_name' / 'image_collection' are placeholders)
    image_infos = mdb['database_name']['image_collection'].find({"image_id": {"$in": image_ids_part}})

    # merge every document that has an image_size into the shared dict, keyed by image_id
    for image_info in image_infos:
        if image_info.get("image_size"):
            image_dict[image_info["image_id"]] = image_info
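# Optional refinement (a sketch under the same assumptions as above: the hypothetical mdb
# connection, the 'database_name' / 'image_collection' placeholders, and the image_id /
# image_size fields): pass a projection to find() so each batch only returns the fields
# that are actually used, which keeps the data pulled back per batch small.
needed_fields = {"_id": 0, "image_id": 1, "image_size": 1}
image_sizes = {}
for idx in range(0, len(image_ids), batch_step):
    image_ids_part = image_ids[idx:idx + batch_step]
    cursor = mdb['database_name']['image_collection'].find(
        {"image_id": {"$in": image_ids_part}}, needed_fields)
    for doc in cursor:
        if doc.get("image_size"):
            image_sizes[doc["image_id"]] = doc["image_size"]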

  

