Updating a tk ProgressBar from a multiprocessing.Process in Python 3
Doing something similar, I ended up having to use a combination of threads and processes. The GUI front end had two threads: one for tkinter, and one reading from a multiprocessing.Queue and calling gui.update(); the back-end processes would then write updates into that Queue.
This might be a strange approach, but it works for me. Copy and paste this code to a file and run it to see the result. It's ready to run.
I don't have the patience to explain my code right now; I might edit it another day.
Oh, and this is in Python 2.7. I started programming two months ago, so I have no idea if the difference is relevant.
# -*- coding: utf-8 -*-# threadsandprocesses.py# Importing modulesimport timeimport threadingimport multiprocessingimport Tkinter as tkiimport ttkclass Master(object): def __init__(self): self.mainw = tki.Tk() self.mainw.protocol("WM_DELETE_WINDOW", self.myclose) self.mainw.title("Progressbar") self.mainw.geometry('300x100+300+300') self.main = tki.Frame(self.mainw) self.RunButton = ttk.Button(self.main, text='Run', command=self.dostuff) self.EntryBox = ttk.Entry(self.main) self.EntryBox.insert(0, "Enter a number") self.progress = ttk.Progressbar(self.main, mode='determinate', value=0) self.main.pack(fill=tki.BOTH, expand=tki.YES) self.progress.pack(expand=tki.YES) self.EntryBox.pack(expand=tki.YES) self.RunButton.pack() print "The Master was created" def dostuff(self): print "The Master does no work himself" data = range(int(self.EntryBox.get())) S = Slave(self, data) print "The Master created a Slave to do his stuff" print "The Slave gets told to start his work" S.start() def myclose(self): self.mainw.destroy() return def nextstep(self): print "Good job, Slave, I see the result is" print Master.results.get()class Slave(threading.Thread): def __init__(self, guest, data): print "This is the Slave." print "Nowdays, Work is outsourced!" self.data = data self.guest = guest threading.Thread.__init__(self) def run(self): print "The Slave is outsourcing his work to Calcualte inc." time.sleep(1) Outsourcing = Calculate() Results = Outsourcing.run(self.guest, self.data) return Results# unwrapping outside a classdef calc(arg, **kwarg): return Calculate.calculate(*arg, **kwarg)class Calculate(object): def run(self, guest, data): print"This is Calculate inc. ... how can I help you?" 
time.sleep(1) maximum = int(guest.EntryBox.get()) guest.progress.configure(maximum=maximum, value=0) manager = multiprocessing.Manager() queue = manager.Queue() lock = manager.Lock() print "Things are setup and good to go" # Counting the number of available CPUs in System pool_size = multiprocessing.cpu_count() print "Your system has %d CPUs" % (pool_size) # Creating a pool of processes with the maximal number of CPUs possible pool = multiprocessing.Pool(processes=pool_size) Master.results = pool.map_async(calc, (zip([self]*len(data), [lock]*len(data), [queue]*len(data), data))) for job in range(1, maximum+1): queue.get() # this is an abuse I think, but works for me guest.progress.configure(value=job) # Properly close and end all processes, once we're done pool.close() pool.join() print "All done" guest.nextstep() return def calculate(self, lock, queue, indata): lock.acquire() print 'Reading values and starting work' lock.release() time.sleep(3) # some work results = indata # The works results lock.acquire() print 'Done' lock.release() queue.put("Finished!") return resultsif __name__ == '__main__': TheMaster = Master() TheMaster.mainw.mainloop()