import base64
import time
import traceback
import urllib.request
import gevent
from gevent import monkey
import sys
monkey.patch_all()
import schedule
from gevent import pool
from pymongo import MongoClient
from datetime import datetime, timedelta
import logging
import json
import requests
import os
import uuid
from PIL import Image
import io
from loguru import logger
 
 
# NOTE(security): credentials belong in environment variables or a secrets
# store, not in source. The original URI was garbled by the web scrape —
# TODO confirm the real host/database before running.
client = MongoClient('mongodb://username:password@host:27017/Database')
client_db = client['data_base']
# Collection holding the raw records this script exports from.
temp_data_col = client_db['temp_data']


def work_func():
    """Export unique "username-password" pairs created since the cutoff.

    Queries ``temp_data`` for documents whose ``create_time`` string is on or
    after 2021-04-15T00:37:46.110000, de-duplicates the pairs in a set, and
    appends one ``username-password`` line per pair to ``./temp.txt``.
    """
    # Only documents created at/after the cutoff (create_time is stored as a
    # string, so this is a lexicographic comparison — works for ISO-ish dates).
    mongo_condition_dict = {"create_time": {"$gte": "2021-04-15T00:37:46.110000"}}
    # Projection: drop _id, keep only the two fields we actually read.
    filter_colum_dict = {"_id": 0, "username": 1, "password": 1}

    detail_list = temp_data_col.find(mongo_condition_dict, filter_colum_dict).batch_size(2000)

    # Cursor.count() was removed in PyMongo 4.x; count on the collection instead.
    logger.info('Start search Count Is {}'.format(
        temp_data_col.count_documents(mongo_condition_dict)))

    temp_data_set = set()
    for detail_info in detail_list:
        logger.info(f"detail_info = {detail_info}")
        username, password = detail_info["username"], detail_info["password"]
        temp_data_set.add(username + '-' + password)

    logger.info(f"temp_data_set = {temp_data_set}")
    logger.info(f"temp_data_set size = {len(temp_data_set)}")

    # Open the output file once and write every line, instead of re-opening
    # the file for each entry (the original author's own TODO).
    with open('./temp.txt', 'a') as f:
        f.writelines(data + '\n' for data in temp_data_set)
 
if __name__ == '__main__':
    # Run the export when this file is invoked as a script.
    work_func()
# (Removed web-scrape residue: the page's "Copy the code" button text, not code.)