How to keep Python processing running in the background after the program exits

I am trying to generate a text report file from a data source, which takes a long time; to simulate this I wrote the following code.

I planned to do this using a thread, and thought that t.daemon = True would achieve the goal, but the program will not exit until the operation is complete.

import random
import threading
import time
import logging

logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-10s) %(message)s',
                    )


def worker():
    """thread worker function"""
    tag = random.randint(1, 64)
    file_name = "/tmp/t-%d.txt" % (tag)
    logging.debug('started writing file - %s', file_name)
    f = open(file_name, 'w')
    for x in range(2 ** tag):  # total no of lines is 2**tag
        f.write("%d\n" % x)
    logging.debug('ending')
    f.close()
    return

# to simulate 5 files
for i in range(5):
    t = threading.Thread(target=worker)
    t.daemon = True
    t.start()

main_thread = threading.current_thread()
for t in threading.enumerate():
    if t is main_thread:
        continue
    logging.debug('joining %s', t.name)
    t.join()

When I remove t.join(), the program quits quickly but only some of the data gets written before it exits; adding t.join() makes the program run until the work is done. Is there a way to exit the program immediately while the task still runs to completion in the background?

1 answer

A thread cannot outlive the interpreter that started it: a non-daemon thread keeps the program alive until it finishes, and a daemon thread is killed the moment the main thread exits, so neither gives you work that continues after the program quits. For that you need a separate process, which the operating system runs independently of its parent.
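
For contrast, here is a minimal sketch (not part of the original answer) showing that a daemon thread is killed mid-work the moment the main thread returns:

import threading
import time

def slow_writer():
    # Simulates a long job; as a daemon thread it is killed here
    # the instant the main thread finishes, however far it has gotten.
    for i in range(10):
        print("writing chunk", i)
        time.sleep(1)

t = threading.Thread(target=slow_writer)
t.daemon = True
t.start()
time.sleep(2.5)
print("main exiting; the daemon thread dies with it")

Run it and you get two or three chunks, then silence: the interpreter does not wait for daemon threads, while a non-daemon thread (or t.join()) forces it to wait, which is exactly the dilemma in the question.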

Warning: at the larger tag values this writes enormous files (2 ** 28 lines and up), and the detached workers deliberately keep running after the program exits, so you may have to kill them by hand, e.g. "kill 13345" to stop the worker with pid 13345 (which may still be churning through its 2 ** 25 lines).
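
If you would rather do that from Python than from the shell, a minimal sketch (13345 is only an example pid; use the pid of the process you actually want to stop):

import os
import signal

# Send SIGTERM to the runaway detached worker.
# 13345 stands in for the real pid of the process to kill.
os.kill(13345, signal.SIGTERM)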

Solution: start a separate process for each job and detach it, so the parent can exit while the work continues.

The code:

import logging 
import random
import multiprocessing
import time
import sys


# Note: the detached sub-processes keep logging to stdout after this program has stopped running!
logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-10s) %(message)s',
                    )

def detach():
    p = multiprocessing.current_process()
    name = "worker" + str(p.pid)
    cc = multiprocessing.Process(name=name, target=worker)
    cc.daemon = False
    cc.start()
    logging.debug('Detached process: %s %s', p.name, p.pid)
    sys.stdout.flush()

def worker():
    """thread worker function"""
    #Should probably make sure there isn't already a thread processing this file already...
    tag = random.randint(5, 33) #Stop at 33 to make sure we don't take over the harddrive (8GB)
    file_name = "/tmp/t-%d.txt" % (tag)
    if tag > 26:
      logging.warning('\n\nThe detached process resulting from this may need to be killed by hand.\n')
    logging.debug('started writing file - %s', file_name)
    #Changed your code to use "with", available in any recent python version
    with open(file_name, 'w') as f:
        for x in range(2 ** tag):  # total no of lines is 2**tag
            f.write("%d\n" % x)
    return

# to simulate 5 files
for i in range(5):
    t = multiprocessing.Process(target=detach)
    t.daemon = False
    t.start()
    time.sleep(0.5)  # give detach() a moment to spawn the real worker
    t.terminate()    # kill the middleman; the orphaned worker keeps running
    logging.debug("Terminating main program")