add_max #1
| @ -42,3 +42,13 @@ class DagensLunchMalmens(scrapy.Item): | ||||
|     friday = scrapy.Field() | ||||
|     scraped_by = scrapy.Field() | ||||
|     scraped_at = scrapy.Field() | ||||
| 
 | ||||
class DagensLunchMax(scrapy.Item):
    """Daily-lunch menu scraped from the Max restaurant site.

    Holds one dish per weekday plus provenance metadata about the scrape.
    """

    place = scrapy.Field()       # restaurant name ("Max")
    monday = scrapy.Field()
    tuesday = scrapy.Field()
    wednesday = scrapy.Field()
    thursday = scrapy.Field()
    friday = scrapy.Field()
    scraped_by = scrapy.Field()  # name of the spider class that produced the item
    scraped_at = scrapy.Field()  # timestamp string of when the scrape happened
|  | ||||
							
								
								
									
										24
									
								
								dagens_lunch/spiders/max.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								dagens_lunch/spiders/max.py
									
									
									
									
									
										Normal file
									
								
							| @ -0,0 +1,24 @@ | ||||
| from datetime import datetime | ||||
| import scrapy | ||||
| from ..items import DagensLunchMax | ||||
| 
 | ||||
class MaxSpider(scrapy.Spider):
    """Spider that scrapes the daily-lunch menu from max.se.

    Produces a single ``DagensLunchMax`` item with one dish per weekday.
    """

    name = "max"
    allowed_domains = ["max.se"]
    start_urls = ["https://max.se/maten/meny/maltider/dagens-lunch/"]

    def parse(self, response):
        """Extract one dish per weekday from the product-variations list.

        Each ``<li>`` text is assumed to start with the weekday name, so the
        last space-separated token is taken as the dish — TODO confirm that
        dish names are always a single word on the live page.

        :param response: the downloaded lunch-menu page
        :returns: a ``DagensLunchMax`` item; days missing from the page are
            set to ``None`` instead of raising ``IndexError``.
        """
        variations = response.xpath(
            "//div/div[2]/div[contains(@class, 'o-product-info')]"
            "/ul[contains(@class, 'o-product-info__variations')]"
        )
        days = [
            text.split(" ")[-1]
            for text in variations.xpath("./li/text()").getall()
        ]
        if len(days) < 5:
            # The site layout changed or the menu is incomplete; pad so the
            # spider degrades gracefully instead of crashing mid-crawl.
            self.logger.warning("expected 5 weekday entries, got %d", len(days))
            days += [None] * (5 - len(days))
        return DagensLunchMax(
            place="Max",
            monday=days[0],
            tuesday=days[1],
            wednesday=days[2],
            thursday=days[3],
            friday=days[4],
            scraped_by=self.__class__.__name__,
            scraped_at=datetime.now().isoformat(),
        )
		Loading…
	
	
			
			x
			
			
		
	
		Reference in New Issue
	
	Block a user