How to Build Progress Monitoring Using Advanced tqdm for Async, Parallel, Pandas, Logging, and High-Performance Workflows

Dataemia
1 Min Read


# Section 5: tqdm's concurrency helpers (tqdm.contrib.concurrent) wrap
# ThreadPoolExecutor / ProcessPoolExecutor and render a progress bar for you.
print("5) Concurrency progress: thread_map / process_map")
def cpuish(n: int) -> int:
    """CPU-flavored busy work: fold ``n`` into a rolling modular sum.

    Deterministic for a given ``n``; the result is always in
    ``[0, 1_000_003)``. Exists purely to give the worker pools
    something compute-bound to chew on.
    """
    acc = 0
    for step in range(50_000):
        acc = (acc + n * step) % 1_000_003
    return acc


# Fan the same CPU-ish workload out over a thread pool, then a process pool.
inputs = list(range(80))

thread_results = thread_map(cpuish, inputs, max_workers=8, desc="thread_map")
print("thread_map done:", len(thread_results))

# Processes pay a spawn/serialization cost, so demo them on a smaller slice
# and hand each worker a few items at a time via chunksize.
proc_results = process_map(cpuish, inputs[:20], max_workers=2, chunksize=2, desc="process_map")
print("process_map done:", len(proc_results))
print()


print("6) logging_redirect_tqdm (logs won’t break bars)")

# Configure a dedicated demo logger with exactly one predictable handler,
# replacing anything a previous run may have attached.
logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.handlers = [stream_handler]


# While this context is active, records emitted through logging are routed
# via tqdm.write, so log lines print above the bar instead of tearing it.
checkpoints = {5, 25, 45}
with logging_redirect_tqdm():
    for k in tqdm(range(60), desc="Work with logs"):
        time.sleep(0.01)
        if k in checkpoints:
            logger.info(f"checkpoint k={k}")
print()


# Section 7: wrap asyncio.as_completed in tqdm so the bar ticks as each
# async task finishes, regardless of start order.
print("7) asyncio progress (as_completed) — Colab/Jupyter-safe")
async def io_task(i: int):
    """Simulate one small I/O-bound job.

    Sleeps for a random 20–120 ms, then returns the task id paired with a
    random float payload in [0, 1).
    """
    delay = random.uniform(0.02, 0.12)
    await asyncio.sleep(delay)
    return i, random.random()


async def run_async():
    """Launch 80 io_task coroutines and gather results in completion order.

    asyncio.as_completed yields awaitables as tasks finish, so wrapping it
    in tqdm (with an explicit total, since it has no len) makes the bar
    advance per completed task.
    """
    pending = [asyncio.create_task(io_task(i)) for i in range(80)]
    progress = tqdm(asyncio.as_completed(pending), total=len(pending), desc="async tasks")
    return [await finished for finished in progress]


# NOTE: top-level `await` only works where an event loop is already running
# (IPython/Jupyter/Colab). In a plain script, use
# `results = asyncio.run(run_async())` instead.
results = await run_async()
print("async done:", len(results), "results")



Source link

Share This Article
Leave a Comment

Leave a Reply

Your email address will not be published. Required fields are marked *

error: Content is protected !!