import os
import getpass
import requests
from bs4 import BeautifulSoup
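# Scraper for www.ackyshine.com: pulls the "vichekesho" (jokes) listings from the
# home page and saves each item as a text file inside a folder on the user's Desktop.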
def createFolder(directory):
    """Create the target directory if it does not already exist."""
    try:
        if not os.path.exists(directory):
            os.makedirs(directory)
    except OSError:
        print('Error: Creating directory. ' + directory)
username = getpass.getuser()
directory = f"C:\\Users\\{username}\\Desktop\\er40r"
createFolder(directory)
os.chdir(directory)
url = "http://www.ackyshine.com/vichekesho:_home"
html = requests.get(url)
try:
    html.raise_for_status()
except Exception as exc:
    print('There was a problem: %s' % (exc))
soup = BeautifulSoup(html.text, "html.parser")
def mchanganyiko(lists):
    """Scrape the first 21 'mchanganyiko' (mixed) entries and save each to its own text file."""
    items = lists[0].select('.list-pages-item')
    try:
        for i in range(21):
            link = items[i].select('a')
            visit = 'http://www.ackyshine.com' + link[0].get('href')
            html = requests.get(visit)
            try:
                html.raise_for_status()
            except Exception as exc:
                print('There was a problem: %s' % (exc))
            soup = BeautifulSoup(html.text, "html.parser")
            header = soup.select('span strong')
            paragraph = soup.select('p')
            # Write the title (third 'span strong') and paragraphs 5 and 6 to one file.
            with open(f"mchanganyiko{i}.txt", "w+", encoding="utf-8") as word:
                word.write(f'{header[2].getText()}\n')
                word.write(f'\n{paragraph[5].getText()}\n')
                word.write(f'\n{paragraph[6].getText()}\n')
    except Exception as exc:
        print('There was a problem in mchanganyiko: %s' % (exc))
def mipya(lists):
    """Scrape the first 21 'mipya' (new) entries and save each to its own text file."""
    items = lists[1].select('.list-pages-item')
    try:
        for i in range(21):
            link = items[i].select('a')
            visit = 'http://www.ackyshine.com' + link[0].get('href')
            html = requests.get(visit)
            try:
                html.raise_for_status()
            except Exception as exc:
                print('There was a problem: %s' % (exc))
            soup = BeautifulSoup(html.text, "html.parser")
            header = soup.select('span strong')
            paragraph = soup.select('p')
            # Write the title (third 'span strong') and paragraphs 5 and 6 to one file.
            with open(f"mipya{i}.txt", "w+", encoding="utf-8") as word:
                word.write(f'{header[2].getText()}\n')
                word.write(f'\n{paragraph[5].getText()}\n')
                word.write(f'\n{paragraph[6].getText()}\n')
    except Exception as exc:
        print('There was a problem in mipya: %s' % (exc))
def specifics(url):
    """Scrape one specific page and save its title and two paragraphs to specifics.txt."""
    html = requests.get(url)
    try:
        html.raise_for_status()
    except Exception as exc:
        print('There was a problem: %s' % (exc))
    soup = BeautifulSoup(html.text, "html.parser")
    header = soup.select('span strong')
    paragraph = soup.select('p')
    # Write the title (third 'span strong') and paragraphs 5 and 6 to one file.
    with open("specifics.txt", "w+", encoding="utf-8") as word:
        word.write(f'{header[2].getText()}\n')
        word.write(f'\n{paragraph[5].getText()}\n')
        word.write(f'\n{paragraph[6].getText()}\n')
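# The two '.col-sm-6' columns on the home page carry the "mchanganyiko" and "mipya"
# listings consumed by the functions above (lists[0] and lists[1] respectively).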
lists = soup.select('.col-sm-6')
specifics("http://www.ackyshine.com/vichekesho:hawa-wanaume-wanaopenda-wanawake-hovyo-barabarani")
mchanganyiko(lists)
mipya(lists)