Sushill

kMarket(functionalities)

Mar 13th, 2022 (edited)
340
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.05 KB | None | 0 0
  1. from bs4 import BeautifulSoup
  2. import requests
  3.  
url = "https://kalimatimarket.gov.np/#commodityPricesDailyTable"

# Spoof an iPhone Safari User-Agent so the server treats this request like a
# real browser visit; note this is not 100% effective against bot detection.
headers = {'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X)'
                         ' AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148'}

req = requests.get(url, headers=headers)  # GET the Kalimati market page
soup = BeautifulSoup(req.content, 'lxml')  # parse the response HTML with lxml

# The daily commodity price table: locate the <table> by id, descend into its
# <tbody>, and collect every data row (<tr>) for column extraction below.
table = soup.find('table', id='commodityDailyPrice').find('tbody').find_all('tr')
  15.  
  16.  
  17. # Date of the updated data as it is updated daily and this avoids overwriting of files.
  18. def date_header():
  19.     # this is the html of table i.e. commodity price table:
  20.     date = soup.find_all('div', id='commodityPricesDailyTable')
  21.     # Looping through table and scrape the day and date in commodity price table:
  22.     for dat in date:
  23.         return dat.find('h5').text
  24.  
  25.    
  26. # function to scrape datas from commodity table: values represents column to scrape:
  27. # Example: 1 scrape वस्तु datas and so on
  28. def kalimati_market(value):
  29.     # This technique is list comprehension:
  30.     # It stores pulled data and store in lists: Less code and runs fast: It's quite powerful in Python
  31.     lists = [(tab.find_all('td'))[value].text for tab in table]
  32.     return lists
  33.  
  34.  
# If you prefer not to use a list comprehension, the loop-based version below
# is the equivalent. It is kept inside a string literal (effectively commented
# out) to avoid redefining kalimati_market; delete the function above and
# uncomment this one if you want the explicit-loop style.
# NOTE(review): as written, this commented-out variant is missing a final
# `return lists` statement — add it before using it.
'''def kalimati_market(value):
   lists = []  # creating empty lists to store values after scraping the needed datas:
   for tab in table:  # looping through table and grabbing data from their respective columns
       datas = tab.find_all('td')  # this html tag has contains intended datas that we need to pull and save in Excel sheet:
       lists.append(datas[value].text) '''
  43.  
  44.  
Add Comment
Please, Sign In to add comment