Migrate 591 list spider from original code, #31
ddio committed Jun 10, 2019
1 parent f3b9ff8 commit 8dd4605
Showing 12 changed files with 587 additions and 0 deletions.
21 changes: 21 additions & 0 deletions scrapy-package/LICENSE
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 ddio

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
18 changes: 18 additions & 0 deletions scrapy-package/README.md
@@ -0,0 +1,18 @@
# TW Rental House Utility for Scrapy

This package is built for crawling Taiwanese rental-house websites using [Scrapy](https://scrapy.org/).
Because crawlers differ in goal, scale, and pipeline, this package provides only a minimum feature set that lets developers list and decode rental house webpages into structured data without knowing much about each website's detailed HTML and API structure. It is also designed for extensibility, allowing developers to insert customized callbacks, manipulate data, and integrate with an existing crawler structure.
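
A minimal sketch of the intended integration pattern is shown below. The base class name `Rental591Spider`, its import path, and the `parse_detail_callback` keyword are illustrative assumptions, since this commit only migrates the shared building blocks and does not document the spider API yet:

```python
# Hypothetical usage sketch -- the class name, import path, and the
# callback keyword are assumptions, not the documented API.
from scrapy_tw_rental_house.spiders.rental591 import Rental591Spider


class MyCrawler(Rental591Spider):
    name = 'my_crawler'

    def __init__(self, **kwargs):
        # Insert a customized callback so structured items can be
        # inspected or reshaped before they reach the pipelines.
        super().__init__(parse_detail_callback=self.my_parse_detail, **kwargs)

    def my_parse_detail(self, response):
        self.logger.info('Got detail page %s', response.url)
```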

## Installation

## Basic Usage

## Handlers

## Contribution Guideline

Each spider class must:

1. Provide unit tests.
2. Be runnable with the `scrapy` command.
3. Avoid unnecessary third-party dependencies.
25 changes: 25 additions & 0 deletions scrapy-package/requirements.txt
@@ -0,0 +1,25 @@
asn1crypto==0.24.0
attrs==19.1.0
Automat==0.7.0
cffi==1.12.3
constantly==15.1.0
cryptography==2.7
cssselect==1.0.3
hyperlink==19.0.0
idna==2.8
incremental==17.5.0
lxml==4.3.3
parsel==1.5.1
pyasn1==0.4.5
pyasn1-modules==0.2.5
pycparser==2.19
PyDispatcher==2.0.5
PyHamcrest==1.9.0
pyOpenSSL==19.0.0
queuelib==1.5.0
Scrapy==1.6.0
service-identity==18.1.0
six==1.12.0
Twisted==19.2.1
w3lib==1.20.0
zope.interface==4.6.0
1 change: 1 addition & 0 deletions scrapy-package/scrapy-tw-rental-house/.gitignore
@@ -0,0 +1 @@
__pycache__
1 change: 1 addition & 0 deletions scrapy-package/scrapy-tw-rental-house/__init__.py
@@ -0,0 +1 @@
name = "scrapy-tw-rental-house"
74 changes: 74 additions & 0 deletions scrapy-package/scrapy-tw-rental-house/items.py
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html

from scrapy import Field, Item


class GenericHouseItem(Item):
    top_region = Field()
    sub_region = Field()
    deal_time = Field()
    deal_status = Field()
    n_day_deal = Field()
    vendor = Field()
    vendor_house_id = Field()
    vendor_house_url = Field()
    # price related
    monthly_price = Field()
    deposit_type = Field()
    n_month_deposit = Field()
    deposit = Field()
    is_require_management_fee = Field()
    monthly_management_fee = Field()
    has_parking = Field()
    is_require_parking_fee = Field()
    monthly_parking_fee = Field()
    per_ping_price = Field()
    # other basic info
    building_type = Field()
    property_type = Field()
    is_rooftop = Field()
    floor = Field()
    total_floor = Field()
    dist_to_highest_floor = Field()
    floor_ping = Field()
    n_living_room = Field()
    n_bed_room = Field()
    n_bath_room = Field()
    n_balcony = Field()
    apt_feature_code = Field()
    rough_address = Field()
    # (latitude, longitude) tuple in the WGS84 coordinate system
    rough_coordinate = Field()
    # boolean map
    # electricity: true, water: true, gas: true, internet: true, cable_tv: true
    additional_fee = Field()
    # school, park, dept_store, conv_store, traditional_mkt, night_mkt,
    # hospital, police_office
    living_functions = Field()
    # subway, bus, public_bike, train, hsr
    transportation = Field()
    has_tenant_restriction = Field()
    has_gender_restriction = Field()
    gender_restriction = Field()
    can_cook = Field()
    allow_pet = Field()
    has_perperty_registration = Field()
    # undetermined for now
    facilities = Field()
    contact = Field()
    author = Field()
    agent_org = Field()
    imgs = Field()


class RawHouseItem(Item):
    house_id = Field()
    vendor = Field()
    is_list = Field()
    raw = Field()
    dict = Field()
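
Both classes are plain Scrapy `Item`s, so downstream code can tell raw payloads and normalized records apart with `isinstance`. A minimal sketch of a consuming pipeline, assuming the package is importable as `scrapy_tw_rental_house` (the final import path may differ from the dashed directory name used here):

```python
# Example item pipeline consuming the items defined above.
# The import path is an assumption based on the package name.
from scrapy_tw_rental_house.items import GenericHouseItem, RawHouseItem


class ExampleHousePipeline:
    def process_item(self, item, spider):
        if isinstance(item, RawHouseItem):
            # Raw list/detail payloads can be archived for later re-parsing.
            spider.logger.debug('Raw payload for house %s', item.get('house_id'))
        elif isinstance(item, GenericHouseItem):
            # Vendor-neutral, structured fields live here.
            spider.logger.info(
                'House %s: %s per month',
                item.get('vendor_house_id'),
                item.get('monthly_price'),
            )
        return item
```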
4 changes: 4 additions & 0 deletions scrapy-package/scrapy-tw-rental-house/spiders/__init__.py
@@ -0,0 +1,4 @@
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
86 changes: 86 additions & 0 deletions scrapy-package/scrapy-tw-rental-house/spiders/all_591_cities.py
@@ -0,0 +1,86 @@
all_591_cities = [
    {
        "city": "台北市",
        "id": "1"
    },
    {
        "city": "新北市",
        "id": "3"
    },
    {
        "city": "桃園市",
        "id": "6"
    },
    {
        "city": "新竹市",
        "id": "4"
    },
    {
        "city": "新竹縣",
        "id": "5"
    },
    {
        "city": "基隆市",
        "id": "2"
    },
    {
        "city": "宜蘭縣",
        "id": "21"
    },
    {
        "city": "台中市",
        "id": "8"
    },
    {
        "city": "彰化縣",
        "id": "10"
    },
    {
        "city": "苗栗縣",
        "id": "7"
    },
    {
        "city": "雲林縣",
        "id": "14"
    },
    {
        "city": "南投縣",
        "id": "11"
    },
    {
        "city": "高雄市",
        "id": "17"
    },
    {
        "city": "台南市",
        "id": "15"
    },
    {
        "city": "嘉義市",
        "id": "12"
    },
    {
        "city": "屏東縣",
        "id": "19"
    },
    {
        "city": "嘉義縣",
        "id": "13"
    },
    {
        "city": "花蓮縣",
        "id": "23"
    },
    {
        "city": "台東縣",
        "id": "22"
    },
    {
        "city": "金門縣",
        "id": "25"
    },
    {
        "city": "澎湖縣",
        "id": "24"
    }
]
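
The list above is plain data; a spider would typically iterate over it to seed one listing request per city. A sketch of that pattern follows. The endpoint URL and query parameter are assumptions for illustration and are not taken from this commit:

```python
# Example of seeding requests from the city table above.
# The endpoint URL and "region" parameter are assumptions.
import scrapy

from .all_591_cities import all_591_cities


class Example591ListSpider(scrapy.Spider):
    name = 'example_591_list'

    def start_requests(self):
        for city in all_591_cities:
            url = 'https://rent.591.com.tw/home/search/rsList?region={}'.format(
                city['id'])
            yield scrapy.Request(
                url,
                callback=self.parse_list,
                meta={'city': city['city']},
            )

    def parse_list(self, response):
        # A real spider would decode the response into RawHouseItem /
        # GenericHouseItem instances here.
        self.logger.info('Got list page for %s', response.meta['city'])
```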
66 changes: 66 additions & 0 deletions scrapy-package/scrapy-tw-rental-house/spiders/enums.py
@@ -0,0 +1,66 @@
from enum import IntEnum
from os import path
import json


class DealStatusType(IntEnum):
    OPENED = 0
    NOT_FOUND = 1
    DEAL = 2

UNKNOWN_ENUM = 0xffff

BuildingType = IntEnum('BuildingType', [
    ('公寓', 0),            # walk-up apartment
    ('透天', 1),            # townhouse (whole standalone building)
    ('電梯大樓', 2),        # high-rise building with elevator
    ('華廈', 3),            # mid-rise building with elevator
    ('辦公商業大樓', 4),    # office / commercial building
    ('倉庫', 5),            # warehouse
    ('店面(店鋪)', 6),      # storefront / shop
    ('廠辦', 7),            # factory office building
    ('工廠', 8),            # factory
    ('農舍', 9)             # farmhouse
])

class PropertyType(IntEnum):
    整層住家 = 0    # whole home (entire unit)
    獨立套房 = 1    # studio with private bathroom
    分租套房 = 2    # suite partitioned from a shared unit
    雅房 = 3        # single room with shared bathroom
    車位 = 4        # parking space
    其他 = 5        # other
    倉庫 = 6        # warehouse
    場地 = 7        # venue / event space
    攤位 = 8        # market stall / booth


class ContactType(IntEnum):
    屋主 = 0      # landlord / owner
    代理人 = 1    # owner's representative
    房仲 = 2      # real-estate agent


class DepositType(IntEnum):
    月 = 0        # number of months of rent
    定額 = 1      # fixed amount
    面議 = 2      # negotiable
    其他 = 3      # other


class GenderType(IntEnum):
    不限 = 0    # no restriction
    男 = 1      # male only
    女 = 2      # female only
    其他 = 3    # other


tw_regions_path = '{}/data/tw_regions.json'.format(
    path.dirname(path.realpath(__file__)))

with open(tw_regions_path) as regions_file:
    tw_regions = json.load(regions_file)

TopRegionType = IntEnum('TopRegionType', tw_regions['top_region'])

SubRegionType = IntEnum('SubRegionType', tw_regions['sub_region'])
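
Because `BuildingType` and the two region enums are built with the functional `IntEnum` API, members are looked up by their original Chinese label, which makes it straightforward to normalize vendor strings into the integer codes stored in `GenericHouseItem`. A small sketch, with `UNKNOWN_ENUM` as the fallback for labels the enum does not cover (assumed to live in the same package as `enums.py`):

```python
# Example lookups against the enums defined above.
from .enums import BuildingType, PropertyType, UNKNOWN_ENUM


def normalize_building_type(label):
    # Map a vendor-provided label, e.g. '電梯大樓', to its integer code;
    # fall back to UNKNOWN_ENUM for labels not covered by the enum.
    try:
        return BuildingType[label].value
    except KeyError:
        return UNKNOWN_ENUM


# normalize_building_type('電梯大樓')  -> 2
# normalize_building_type('豪宅')      -> 65535 (UNKNOWN_ENUM)
# PropertyType.整層住家.value          -> 0
```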