900字范文,内容丰富有趣,生活中的好帮手!
900字范文 > python多线程url采集器 + github_python实现多线程采集的2个代码例子

python多线程url采集器 + github_python实现多线程采集的2个代码例子

时间:2022-06-12 02:55:08

相关推荐

python多线程url采集器 + github_python实现多线程采集的2个代码例子

代码一:

#!/usr/bin/python

# -*- coding: utf-8 -*-

#encoding=utf-8

import threading

import Queue

import sys

import urllib2

import re

import MySQLdb

#
# Database connection settings (placeholder credentials -- fill in real values)
#
DB_HOST = '127.0.0.1'
DB_USER = "XXXX"
DB_PASSWD = "XXXXXXXX"
DB_NAME = "xxxx"
#
# Worker / queue settings
#
THREAD_LIMIT = 3               # number of spider worker threads to start
jobs = Queue.Queue(5)          # bounded queue of pending [id, url] jobs
singlelock = threading.Lock()  # serialises console printing across threads
info = Queue.Queue()           # collected [id, title] results

def workerbee(inputlist):

for x in xrange(THREAD_LIMIT):

print 'Thead {0} started.'.format(x)

t = spider()

t.start()

for i in inputlist:

try:

jobs.put(i, block=True, timeout=5)

except:

singlelock.acquire()

print "The queue is full !"

singlelock.release()

# Wait for the threads to finish

singlelock.acquire() # Acquire the lock so we can print

print "Waiting for threads to finish."

singlelock.release() # Release the lock

jobs.join() # This command waits for all threads to finish.

# while not jobs.empty():

# print jobs.get()

def getTitle(url, time=10):
    """Fetch `url` and return the text inside its <title> tag.

    url  -- absolute URL to download.
    time -- socket timeout in seconds (default 10).

    Raises urllib2.URLError on network failure and IndexError when the
    page contains no <title> tag.
    """
    response = urllib2.urlopen(url, timeout=time)
    try:
        html = response.read()
    finally:
        # Release the connection even if read() raises.
        response.close()
    # Reconstructed: the scrape stripped the literal tags and mangled
    # "re.compile" into "pile".
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    # title = title[0].decode('gb2312','replace').encode('utf-8')
    title = title[0]
    return title

class spider(threading.Thread):
    """Worker thread: pulls [id, url] jobs from the shared `jobs` queue,
    resolves each page title via getTitle(), and pushes [id, title] onto
    `info`. A thread exits once the queue stays empty for one second."""

    def run(self):
        while 1:
            try:
                job = jobs.get(True, 1)
            except Queue.Empty:
                break  # no work for 1s -- assume the producer is done
            try:
                singlelock.acquire()
                try:
                    title = getTitle(job[1])
                    info.put([job[0], title], block=True, timeout=5)
                finally:
                    # Bug fix: the original's bare except broke out while the
                    # lock was still held, deadlocking every other worker.
                    singlelock.release()
            except Exception:
                # Best-effort: skip pages that fail to download/parse.
                pass
            finally:
                # Bug fix: the original skipped task_done() on failure, so
                # jobs.join() in workerbee() could hang forever.
                jobs.task_done()

if __name__ == '__main__':

con = None

urls = []

try:

con = MySQLdb.connect(DB_HOST,DB_USER,DB_PASSWD,DB_NAME)

cur = con.cursor()

cur.execute('SELECT id,url FROM `table_name` WHERE `status`=0 LIMIT 10')

rows = cur.fetchall()

for row in rows:

# print row

urls.append([row[0],row[1]])

workerbee(urls)

while not info.empty():

print info.get()

finally:

if con:

con.close()

代码二:

#!/usr/bin/python

# -*- coding: utf-8 -*-

#encoding=utf-8

#Filename:robot.py

import threading,Queue,sys,urllib2,re

#
# Runtime settings
#
THREAD_LIMIT = 3 #number of spider worker threads to start
jobs = Queue.Queue(5) #bounded queue of pending url jobs (length 5)
singlelock = threading.Lock() #lock serialising console printing across threads
# Hard-coded relative url list to crawl (host prefix stripped by the scrape).
urls = ['/w/n/-04-28/1634703505.shtml','/w/n/-04-28/1246703487.shtml','/w/n/-04-28/1028703471.shtml','/w/n/-04-27/1015703426.shtml','/w/n/-04-26/1554703373.shtml','/w/n/-04-26/1512703346.shtml','/w/n/-04-26/1453703334.shtml','/w/n/-04-26/1451703333.shtml','/w/n/-04-26/1445703329.shtml','/w/n/-04-26/1434703322.shtml','/w/n/-04-26/1433703321.shtml','/w/n/-04-26/1433703320.shtml','/w/n/-04-26/1429703318.shtml','/w/n/-04-26/1429703317.shtml','/w/n/-04-26/1409703297.shtml','/w/n/-04-26/1406703296.shtml','/w/n/-04-26/1402703292.shtml','/w/n/-04-26/1353703286.shtml','/w/n/-04-26/1348703284.shtml','/w/n/-04-26/1327703275.shtml','/w/n/-04-26/1239703265.shtml','/w/n/-04-26/1238703264.shtml','/w/n/-04-26/1231703262.shtml','/w/n/-04-26/1229703261.shtml','/w/n/-04-26/1228703260.shtml','/w/n/-04-26/1223703259.shtml','/w/n/-04-26/1218703258.shtml','/w/n/-04-26/1202703254.shtml','/w/n/-04-26/1159703251.shtml','/w/n/-04-26/1139703233.shtml']

def workerbee(inputlist):

for x in xrange(THREAD_LIMIT):

print 'Thead {0} started.'.format(x)

t = spider()

t.start()

for i in inputlist:

try:

jobs.put(i, block=True, timeout=5)

except:

singlelock.acquire()

print "The queue is full !"

singlelock.release()

# Wait for the threads to finish

singlelock.acquire() # Acquire the lock so we can print

print "Waiting for threads to finish."

singlelock.release() # Release the lock

jobs.join() # This command waits for all threads to finish.

# while not jobs.empty():

# print jobs.get()

def getTitle(url, time=10):
    """Fetch `url` and return its <title> text re-encoded from gb2312 to utf-8.

    url  -- URL to download.
    time -- socket timeout in seconds (default 10).

    Raises urllib2.URLError on network failure and IndexError when the
    page contains no <title> tag.
    """
    response = urllib2.urlopen(url, timeout=time)
    try:
        html = response.read()
    finally:
        # Release the connection even if read() raises.
        response.close()
    # Reconstructed: the scrape stripped the literal tags and mangled
    # "re.compile" into "pile".
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    title = title[0].decode('gb2312', 'replace').encode('utf-8')
    return title

class spider(threading.Thread):

def run(self):

while 1:

try:

job = jobs.get(True,1)

singlelock.acquire()

title = getTitle(job)

print 'This {0} is {1}'.format(job,title)

singlelock.release()

jobs.task_done()

except:

break;

if __name__ == '__main__':
    # Crawl the hard-coded url list and print each page title.
    workerbee(urls)

本内容不代表本网观点和政治立场,如有侵犯你的权益请联系我们处理。
网友评论
网友评论仅供其表达个人看法,并不表明网站立场。