python爬蟲爬取鏈家二手房信息


#coding=utf-8  
import requests
from fake_useragent import UserAgent
from bs4 import BeautifulSoup
import json
import csv
import time


# Build request headers with a randomized Chrome user-agent so the target
# site is less likely to reject the scraper as a bot.
userAgent = UserAgent()
headers = {
    # BUG FIX: original wrote `userAgent .Chrome` (stray space) and used the
    # capitalized attribute, which only resolves on old fake_useragent
    # versions that normalize names; `.chrome` works on all versions.
    'user-agent': userAgent.chrome
}

# Module-level accumulator: start_spider() appends one dict per listing here.
data_list = []


def start_spider(page):
    """Scrape one page of Lianjia (Nanjing) second-hand housing listings.

    Appends one dict per listing to the module-level ``data_list``.
    Listings whose markup does not match the expected layout are logged
    and skipped.

    :param page: 1-based page number of the listing index to fetch.
    """
    # Allow the connection pool to retry failed connections.
    requests.adapters.DEFAULT_RETRIES = 15
    session = requests.session()
    # Disable keep-alive: a fresh connection per request helps avoid
    # "max retries exceeded" errors on long scraping runs.
    session.keep_alive = False
    # Default target is Nanjing; pagination is encoded in the path as pg{N}.
    url = 'https://nj.lianjia.com/ershoufang/pg{}/'.format(page)
    # BUG FIX: the original configured the session above but then fetched
    # with the bare requests.get, so the retry/keep-alive settings were
    # never applied.  Fetch through the session instead.
    resp = session.get(url, headers=headers, timeout=10)
    # Parse the response body with BeautifulSoup.
    soup = BeautifulSoup(resp.content, 'lxml')
    # Each listing is an <li class="LOGCLICKDATA"> under .sellListContent.
    for sell in soup.select('.sellListContent li.LOGCLICKDATA'):
        try:
            # Listing title text.
            title = sell.select('div.title a')[0].string
            # houseInfo text: "estate | layout | area | orientation | decor..."
            # — assumed field order; TODO confirm against the live markup.
            houseInfo = list(sell.select('div.houseInfo')[0].stripped_strings)
            info = houseInfo[0].split('|')
            data_dict = {
                'title': title,
                # BUG FIX: the original stored the whole '|'-joined string
                # as the estate name; the name is the first '|'-field.
                # (Identical result when the string contains no '|'.)
                'loupan': info[0].strip(),
                'house_type': info[1].strip(),   # e.g. "2室1厅"
                'area': info[2].strip(),         # floor area
                'toward': info[3].strip(),       # orientation
                'renovation': info[4].strip(),   # decoration type
                'positionInfo': ''.join(sell.select('div.positionInfo')[0].stripped_strings),
                'totalPrice': ''.join(sell.select('div.totalPrice')[0].stripped_strings),
                'unitPrice': list(sell.select('div.unitPrice')[0].stripped_strings)[0],
            }
            data_list.append(data_dict)
        except Exception as e:
            # Ad slots / malformed listings raise IndexError etc.; this is a
            # deliberate best-effort skip — log and continue with the rest.
            print(e)
            continue


def main():
    """Scrape the first 10 listing pages, then persist results to disk."""
    # BUG FIX: the comment promised 10 pages but range(1, 10) only covers
    # 9; range(1, 11) scrapes pages 1-10 inclusive.
    for page in range(1, 11):
        start_spider(page)
        # Be polite to the server between page fetches.
        time.sleep(3)

    # BUG FIX: the original opened this file in 'a+' (append) mode, so a
    # second run produced two concatenated JSON documents — not valid
    # JSON.  Overwrite with the current run's data instead.
    with open('data_json.json', 'w', encoding='utf-8') as f:
        json.dump(data_list, f, ensure_ascii=False, indent=4)
    print('json文件寫入完成')

    # BUG FIX: with no scraped rows the original crashed with IndexError
    # on data_list[0]; skip the CSV step gracefully instead.
    if not data_list:
        print('沒有抓取到數據,跳過csv寫入')
        return

    with open('./data_csv.csv', 'w', encoding='utf-8', newline='') as f:
        # Header columns come from the first record's keys (dicts preserve
        # insertion order, so this matches the order fields were scraped).
        fieldnames = data_list[0].keys()
        writer = csv.DictWriter(f, fieldnames)
        writer.writeheader()
        writer.writerows(data_list)
    print('csv文件寫入完成')


if __name__ == '__main__':
    main()
  

  


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM