# -*- coding: utf-8 -*-
import csv
import json
import urllib2
from bs4 import BeautifulSoup

# Assumed to be defined earlier in the script: pre (the base URL, to which the
# page number is appended), page_num, getonly (presumably a SoupStrainer that
# restricts parsing to the table), and data_all (the list collecting all rows).

for id_url in range(1, page_num + 1):
    print "Start to get url: " + str(id_url)
    url = pre + str(id_url)
    content = urllib2.urlopen(url).read()

    # The response is JSON; the HTML table is embedded in its "data" field.
    obj = json.loads(content)
    datapart = obj["data"]
    table = BeautifulSoup(datapart, parse_only=getonly)

    for id, row in enumerate(table("tr")):
        # Skip the header rows (the first page carries an extra one).
        if (id_url != 1 and id == 0) or (id == 1):
            continue
        text = ''.join(row.findAll(text=True))
        data = text.strip()
        data_split = data.split('\n')
        data_all.append(data_split)

# Write all collected rows to a CSV file, encoding each field as GBK.
csvfile = open('D:\\eggs.csv', 'wb')
recordwriter = csv.writer(csvfile, delimiter=',')
for line in data_all:
    temp_list = []
    for field in line:
        temp_list.append(field.encode('gbk'))
    recordwriter.writerow(temp_list)
csvfile.close()