Python multiprocessing

With modern multi-core processors, we can run more than one job at once.

import multiprocessing
from multicast_message_sender import multicast_message_sender
from time import sleep
import logging
import daiquiri

# Configure daiquiri (a wrapper around the stdlib logging module) at DEBUG
# verbosity and grab the logger used throughout this script.
daiquiri.setup(level=logging.DEBUG)
logger = daiquiri.getLogger()

# Number of sender processes to spawn, and how many messages each one sends.
servers = 5
messages_to_send = 10
# Multicast sender objects and their corresponding Process handles, filled
# in by the loops below (srv[n] is driven by srvprocess[n]).
srv = []
srvprocess = []


def lcl_send(mcast_obj, format_str, count=10, delay=0.5):
    """Send a sequence of numbered multicast messages.

    :param mcast_obj: sender object exposing ``send_msg(msg=...)``
    :param format_str: format template with one positional placeholder,
        e.g. ``"Server {}"``; it is filled with the message index
    :param count: number of messages to send
    :param delay: seconds to sleep between consecutive messages
    :return: None
    """
    # No `global logger` needed: the module-level logger is only read here,
    # never rebound.
    for n in range(count):
        logger.info("Issuing Message")
        mcast_obj.send_msg(msg=format_str.format(n))
        sleep(delay)


logger.info("Starting")

# Create one multicast sender per server, each on its own group and port.
for n in range(servers):
    logger.info("Group 224.1.1.%s", n)
    srv.append(multicast_message_sender(group='224.1.1.{}'.format(n), port=5007 + n))

# Build one daemon process per sender.  Fix: the original logged a `count`
# variable that was initialized to 0 and never incremented, so every message
# said "server0"; use the loop index instead.
for n in range(servers):
    logger.info("Process server%s", n)
    server_process = multiprocessing.Process(
        name='server{}'.format(n),
        target=lcl_send,
        args=(srv[n], "Server {} Msg".format(n), messages_to_send,
              2.0 * (1 + n / 10)))
    # Daemon processes are killed when the main process exits, so they must
    # all be joined below before this script finishes.
    server_process.daemon = True
    srvprocess.append(server_process)

for n in range(servers):
    logger.info("Starting Server %s", n)
    srvprocess[n].start()

# Join every process, not just the last one: daemons are terminated when the
# main process exits, so waiting only on the last process would cut any
# slower sender short.
logger.info("Waiting for processes to finish")
for process in srvprocess:
    process.join()
logger.info("All Done")

Note the `multiprocessing.Process` constructor — supply the target function and its arguments.