This repository has been archived by the owner on Jun 29, 2024. It is now read-only.

python #54

Open · wants to merge 2 commits into main
35 changes: 35 additions & 0 deletions Task1.py
@@ -0,0 +1,35 @@
import requests


def get_weather_data(api_key, city_name):
    # Fetch current weather for the given city from the OpenWeatherMap API
    url = f"http://api.openweathermap.org/data/2.5/weather?q={city_name}&appid={api_key}&units=metric"
    response = requests.get(url)
    data = response.json()
    return data


def display_current_weather(data):
    print("Current Weather Conditions:")
    print("---------------------------")
    print(f"Weather: {data['weather'][0]['description']}")
    print(f"Temperature: {data['main']['temp']}°C")
    print(f"Humidity: {data['main']['humidity']}%")
    print(f"Wind Speed: {data['wind']['speed']} m/s")


def get_forecast(api_key, city_name):
    # Fetch the 5-day / 3-hour forecast for the given city
    url = f"http://api.openweathermap.org/data/2.5/forecast?q={city_name}&appid={api_key}&units=metric"
    response = requests.get(url)
    data = response.json()
    return data


def display_forecast(data):
    print("\nWeather Forecast for the next 5 days:")
    print("-------------------------------------")
    for forecast in data['list']:
        print(f"Date: {forecast['dt_txt']}")
        print(f"Weather: {forecast['weather'][0]['description']}")
        print(f"Temperature: {forecast['main']['temp']}°C")
        print("")


def main():
    api_key = 'b73f861c9b47fe465e680a5d330e01eb'
    city_name = input("Enter city name: ")
    current_weather_data = get_weather_data(api_key, city_name)
    display_current_weather(current_weather_data)
    forecast_data = get_forecast(api_key, city_name)
    display_forecast(forecast_data)


if __name__ == "__main__":
    main()
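
Neither request in Task1.py checks the HTTP status, so an unknown city or a rejected API key only surfaces later as a KeyError inside the display functions. A minimal guard, sketched here as a hypothetical variant rather than part of this diff, could fail fast instead:

import requests

# Hypothetical variant of get_weather_data: fail fast on a bad city or API key
def get_weather_data_checked(api_key, city_name):
    url = f"http://api.openweathermap.org/data/2.5/weather?q={city_name}&appid={api_key}&units=metric"
    response = requests.get(url, timeout=10)
    response.raise_for_status()  # raises requests.HTTPError on 401/404 responses
    return response.json()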
46 changes: 46 additions & 0 deletions Task2.py
@@ -0,0 +1,46 @@
import requests
from bs4 import BeautifulSoup
import csv
import json


def scrape_website(url):
    # Send a GET request to the URL
    response = requests.get(url)

    # Parse HTML content
    soup = BeautifulSoup(response.text, 'html.parser')

    # Collect the text of every <h1> element on the page
    titles = [title.text.strip() for title in soup.find_all('h1')]

    return titles


def save_to_csv(data, filename):
    # Write data to a CSV file, one title per row
    with open(filename, 'w', newline='', encoding='utf-8') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(['Title'])
        writer.writerows([[title] for title in data])


def save_to_json(data, filename):
    # Write data to a JSON file
    with open(filename, 'w', encoding='utf-8') as jsonfile:
        json.dump(data, jsonfile, indent=4)


def main():
    url = input("Enter the URL of the website you want to scrape: ")

    output_format = input("Enter 'csv' or 'json' to choose the output format: ").lower()
    if output_format not in ['csv', 'json']:
        print("Invalid output format. Please enter 'csv' or 'json'.")
        return

    extracted_data = scrape_website(url)
    if output_format == 'csv':
        filename = input("Enter the filename to store the data (without extension): ") + '.csv'
        save_to_csv(extracted_data, filename)
        print(f"Data has been saved to {filename}")
    elif output_format == 'json':
        filename = input("Enter the filename to store the data (without extension): ") + '.json'
        save_to_json(extracted_data, filename)
        print(f"Data has been saved to {filename}")


if __name__ == "__main__":
    main()
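
Because main() in Task2.py sits behind the __name__ guard, the helpers can also be imported and driven without the interactive prompts. A minimal usage sketch, with a placeholder URL and filename:

# Hypothetical non-interactive use of the Task2.py helpers
from Task2 import scrape_website, save_to_csv

titles = scrape_website("https://example.com")  # placeholder URL
save_to_csv(titles, "titles.csv")               # header row 'Title', then one <h1> text per row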