
settings.py

# -*- coding: utf-8 -*-

# Scrapy settings for xiaowu project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
#     https://doc.scrapy.org/en/latest/topics/settings.html
#     https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://doc.scrapy.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'xiaowu'

SPIDER_MODULES = ['xiaowu.spiders']
NEWSPIDER_MODULE = 'xiaowu.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'xiaowu (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
#COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False

# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}
# Headers added to every request by default; 'authorization' carries the
# OAuth token the target API expects.
DEFAULT_REQUEST_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.94 Safari/537.36',
    'authorization': 'oauth c3cef7c66a1843f8b3a9e6a1e3160e20',
}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
#    'xiaowu.middlewares.XiaowuSpiderMiddleware': 543,
#}

# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# RandomUserAgent and ProxyMiddleware are custom classes from
# xiaowu/middlewares.py; a sketch of them follows after this file.
DOWNLOADER_MIDDLEWARES = {
    'xiaowu.middlewares.RandomUserAgent': 1,
    'xiaowu.middlewares.ProxyMiddleware': 100,
    # 'xiaowu.middlewares.XiaowuDownloaderMiddleware': 543,
}

# Leftover copy from another project, kept disabled:
# DOWNLOADER_MIDDLEWARES = {
#     # 'mySpider.middlewares.MyCustomDownloaderMiddleware': 543,
#     'mySpider.middlewares.RandomUserAgent': 1,
#     'mySpider.middlewares.ProxyMiddleware': 100,
# }
# Pool of User-Agent strings the RandomUserAgent middleware picks from
USER_AGENTS = [
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
    "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
    "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
    "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
    "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
    "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
]

# Proxy pool the ProxyMiddleware rotates through
# (each entry: 'ip_port' as host:port, 'user_pass' as user:password or empty)
PROXIES = [
    {'ip_port': '114.215.107.94:60443', 'user_pass': ''},
    # {'ip_port': '61.135.217.7:80', 'user_pass': ''},
    # {'ip_port': '218.72.64.204:18118', 'user_pass': ''},
    # {'ip_port': '183.159.93.219:18118', 'user_pass': ''},
]
COOKIES_ENABLED = False

# Delay between requests
# DOWNLOAD_DELAY = 3
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}

# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# XiaowuPipeline is a custom class from xiaowu/pipelines.py; a sketch of it
# follows after this file.
ITEM_PIPELINES = {
    'xiaowu.pipelines.XiaowuPipeline': 300,
    #'xiaowu.pipelines.BaiduImagePipeline': 400,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
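
The DOWNLOADER_MIDDLEWARES block above enables two custom classes, RandomUserAgent and ProxyMiddleware, whose code is not part of this file. The sketch below shows one minimal way xiaowu/middlewares.py could implement them, assuming they simply draw from the USER_AGENTS and PROXIES settings defined above; the method bodies are an assumption for illustration, not the project's actual middlewares.

middlewares.py (sketch)

# Hypothetical sketch of the two custom downloader middlewares referenced in
# DOWNLOADER_MIDDLEWARES; only the class paths and the USER_AGENTS/PROXIES
# settings come from the configuration above.
import base64
import random

from xiaowu.settings import USER_AGENTS, PROXIES


class RandomUserAgent(object):
    """Assign a random User-Agent from the USER_AGENTS pool to every request."""

    def process_request(self, request, spider):
        request.headers['User-Agent'] = random.choice(USER_AGENTS)


class ProxyMiddleware(object):
    """Route each request through a random proxy from the PROXIES pool."""

    def process_request(self, request, spider):
        proxy = random.choice(PROXIES)
        request.meta['proxy'] = 'http://' + proxy['ip_port']
        if proxy['user_pass']:
            # Authenticated proxies need a Proxy-Authorization header
            creds = base64.b64encode(proxy['user_pass'].encode()).decode()
            request.headers['Proxy-Authorization'] = 'Basic ' + creds

Reading the pools via a module-level import keeps the sketch short; the same values could also be fetched at runtime through spider.settings if the project prefers not to import settings directly.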
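Likewise, ITEM_PIPELINES enables xiaowu.pipelines.XiaowuPipeline, which lives elsewhere in the project. A minimal sketch is shown below, assuming the pipeline only appends scraped items to a JSON-lines file; the output filename and the behaviour are assumptions, not the project's actual pipeline.

pipelines.py (sketch)

# Hypothetical sketch of the item pipeline enabled in ITEM_PIPELINES above.
import json


class XiaowuPipeline(object):
    """Append every scraped item to a JSON-lines file (assumed behaviour)."""

    def open_spider(self, spider):
        self.file = open('items.jl', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        # Serialize the item and keep non-ASCII text readable in the output
        line = json.dumps(dict(item), ensure_ascii=False) + '\n'
        self.file.write(line)
        return item

    def close_spider(self, spider):
        self.file.close()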