Cannot Pool.map() function because of UnpickleableError?

Problem description

So I am trying to multiprocess the function f, which is accessed by a button press with tkinter.

def f(x):
    global doom,results,info
    doom = doom + 1
    if check(x) == True:
        results.add(x)
        info.append(get_column_number(x))
    j.step(1)
    texx = "1/"+doom
    s.configure(text=texx)
    root.update()

The function is called from within another function, like so:

def dojob():
    index = ['URLS'...]
    pool = Pool(processes=4)
    s.configure(text="Shifting Workload to cores..")
    root.update()     
    pool.map(f, index)

The button lives inside the root window.

I get the following error:

Exception in thread Thread-2:
Traceback (most recent call last):
  File "C:\Python27\lib\threading.py", line 808, in __bootstrap_inner
    self.run()
  File "C:\Python27\lib\threading.py", line 761, in run
    self.__target(*self.__args, **self.__kwargs)
  File "C:\Python27\lib\multiprocessing\pool.py", line 342, in _handle_tasks
    put(task)
UnpickleableError: Cannot pickle <type 'tkapp'> objects

I do not even know what a pickle does. Help?

Here is the full code:

from Tkinter import *
from ttk import *
from tkMessageBox import showinfo
from multiprocessing import Pool
import random
emails = set()
import urllib2
import urllib2 as urllib
########

CONSTANT_PAGECOUNT = 20

######
def f(x):
    global doom,emails,info
    doom = doom + 1
    if check(x) == True:
        print "",
        emails.add(x)
        info.append(get_column_number(x))
    j.step(1)
    texx = "Sk1nn1n "+str(doom)+'/'+str(CONSTANT_PAGECOUNT)+""
    s.configure(text=texx)
    root.update()
    return 0
def f(x):
    print ""
def showFile(site,info):
    top = Toplevel()
    top.title('Sites')
    x = Text(top)
    x.pack()
    i=0
    for site_url in site:

        x.insert(END,site_url)
        i=i+1

def get_column_number(url):
    return True

def check(url):
    return True

def getgoogleurl(search,siteurl=False,startr=0):
    if siteurl==False:
        return 'http://www.google.com/search?q='+urllib2.quote(search)+'&start='+str(startr)+'&oq='+urllib2.quote(search)
    else:
        return 'http://www.google.com/search?q=site:'+urllib2.quote(siteurl)+'%20'+urllib2.quote(search)+'&oq=site:'+urllib2.quote(siteurl)+'%20'+urllib2.quote(search)

def getgooglelinks(search,siteurl=False,startr=0):
   #google returns 403 without user agent
   headers = {'User-agent':'Mozilla/11.0'}
   req = urllib2.Request(getgoogleurl(search,siteurl,startr),None,headers)
   site = urllib2.urlopen(req)
   data = site.read()
   site.close()

   #no beatifulsoup because google html is generated with javascript
   start = data.find('<div id="res">')
   end = data.find('<div id="foot">')
   if data[start:end]=='':
      #error, no links to find
      return False
   else:
      links =[]
      data = data[start:end]
      start = 0
      end = 0        
      while start>-1 and end>-1:
          #get only results of the provided site
          if siteurl==False:
            start = data.find('<a href="/url?q=')
          else:
            start = data.find('<a href="/url?q='+str(siteurl))
          data = data[start+len('<a href="/url?q='):]
          end = data.find('&amp;sa=U&amp;ei=')
          if start>-1 and end>-1: 
              link =  urllib2.unquote(data[0:end])
              data = data[end:len(data)]
              if link.find('http')==0:
                  links.append(link)
      return links

def rip(results=15,accuracy=16):
    global e
    keyword = ''+str(e.get())
    if keyword.strip()=="":
        s.configure(text="Please enter a keyword")
        root.update()
        return 0
    linklist = []
    counter = 0
    doom = 0
    while counter < results:
        links = getgooglelinks(keyword,startr=counter)
        for link in links:
            if len(linklist) > CONSTANT_PAGECOUNT:
                s.configure(text="Proccessing..")
                root.update()
                return linklist
            else:
                doom = doom + 1
                linklist.append(link)
                texx = str(doom)+"/"+str(CONSTANT_PAGECOUNT)
                s.configure(text=texx)
                root.update()
        root.update()
        counter = counter+accuracy
    return linklist
def flip():
    global e
    emails = set()
    info = []
    keyword = ''+str(e.get())
    if keyword.strip()=="":
        s.configure(text="Please enter a keyword")
        root.update()
        return 0
    s.configure(text="Generating index..")
    root.update()
    doom = -1
    index = rip(CONSTANT_PAGECOUNT,10)
    if 1:
        try:
            pool = Pool(processes=4)
            #s.configure(text="Shifting Workload to cores..")
            #root.update()     
            pool.map(f, index)
            pool.close()
        except:
            print "The errors there.."
    j.config(value=CONSTANT_PAGECOUNT)
    if len(emails) > 0:
        filepath='relavant_list_'+str(random.randint(1,9999))+'.emList.txt'
        #print len(emails),
        #print "emails found."
        ggg = open(filepath,'a+')
        for x in emails:
            ggg.write(x+"\n")
        showinfo(
            str(len(emails))+" key word related sites found!",
            " sites are saved in "+str(filepath)
        )
        showFile(emails,info)
        s.configure(text=filepath)
    else:
        s.configure(text='No related sites found : (')
if __name__ == '__main__':
    ### CONSTANTS
    version = '1.0'
    ### END CONSTANTS
    root = Tk()
    root.title('Program v'+version)
    s = Style()
    s.theme_use('default')
    #print s.theme_names()
    s.configure("black.Horizontal.TProgressbar", foreground='blue', background='blue')
    j = Progressbar(root, style="black.Horizontal.TProgressbar", orient="vertical", length=200, mode="determinate", maximum=CONSTANT_PAGECOUNT, value=0)
    j.pack(side='right',fill='y')
    f = Frame(root)
    x = Frame(f)
    e = Entry(x,width=51)
    s = Label(x,width=50,anchor='center',text='Waiting for task..')
    Button(f,text='Generate List!',width=50,command=flip).pack(fill='both',expand=True)
    s.pack(side='bottom',fill='y',expand=True)
    e.pack(side='top',fill='both',expand=True)
    x.pack(side='top',fill='y',expand=True)
    f.pack(side='left',expand=True,fill="both")
    root.mainloop()

Recommended answer

You are leaking a tkinter object, most likely because you are trying to update the interface from another process with the last lines of f().
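
For background, pool.map() has to send the function and every item of the iterable to the worker processes, and it does that by pickling (serializing) them; Tk/tkapp objects cannot be serialized this way, which is what the traceback is complaining about. A minimal illustrative sketch, not part of the original code (Python 2, assuming Tkinter and a display are available):

import pickle
import Tkinter

pickle.dumps(['http://example.com', 42])   # plain data pickles fine
root = Tkinter.Tk()
pickle.dumps(root)                         # raises: Tk/tkapp objects cannot be pickled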

Update based on the posted code:

You have a name collision between your function f() and the variable f in __main__, which gets assigned to a Frame in your main window and causes the tkapp pickle error. Rename the function to def myfunc() or something similar. You also need to call pool.join() after pool.close().
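
A minimal, self-contained sketch of the suggested fix (the names myfunc and do_check are illustrative, not taken from the original code): the worker gets a name that is not shadowed by the Frame bound to f in __main__, only picklable data crosses the process boundary, and the pool is joined before the results are used:

from multiprocessing import Pool

def do_check(url):                      # illustrative stand-in for the original check()
    return url.startswith('http')

def myfunc(url):                        # renamed worker: touches no Tkinter objects
    return (url, do_check(url))

if __name__ == '__main__':
    index = ['http://example.com', 'ftp://example.org']
    pool = Pool(processes=4)
    results = pool.map(myfunc, index)   # function and arguments must be picklable
    pool.close()
    pool.join()                         # wait for the workers to finish
    print results                       # update the GUI here, in the main process only

The important point is that everything handed to pool.map() (the function object itself and each item of index) gets pickled, so nothing reachable from the worker may reference the Tk application; all widget updates belong in the main process.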
