This is my Python code for crawling news data from https://bpbd.jatengprov.go.id/category/laporan-bencana/
# -*- coding: utf-8 -*-
import scrapy
class BepeSpider(scrapy.Spider):
    """Crawl disaster reports ("laporan bencana") from the BPBD Jawa Tengah
    site, following pagination links until COUNT_MAX items have been yielded.
    """

    name = 'bepe'
    allowed_domains = ['bpbd.jatengprov.go.id']
    start_urls = ['https://bpbd.jatengprov.go.id/category/laporan-bencana/']
    COUNT_MAX = 100  # stop following pagination once this many items are crawled
    count = 0        # running total of items yielded across all pages

    def parse(self, response):
        """Extract one item per news post, then follow the next-page link.

        Yields dicts with keys:
            judul -- post title (str, or None if the selector matches nothing)
            teks  -- first paragraph of the post body (str or None)
            tag   -- list of tag strings (may be empty)
        """
        for quote in response.css('div.post'):
            item = {
                'judul': quote.css('h2.post-title > a::text').extract_first(),
                'teks': quote.css('div.entrytext > p::text').extract_first(),
                'tag': quote.css('div.up-bottom-border > p.postmetadata > a::text').extract(),
            }
            yield item
            self.count = self.count + 1

        # Follow the pagination link. Guard against None: on the last page
        # there is no next link, and urljoin(None) would raise / request junk.
        next_page_url = response.css('div.alignright > a::attr(href)').extract_first()
        if next_page_url is not None and self.count < self.COUNT_MAX:
            yield scrapy.Request(url=response.urljoin(next_page_url), callback=self.parse)
Is there any way to insert my crawled data into MySQL when each item is a dictionary like this?
# Example item yielded by the spider. Note: 'judul' and 'teks' are single
# strings (or None when the selector matches nothing), while 'tag' is a
# LIST of strings — it cannot be bound directly to a single SQL %s placeholder.
item = {
'judul': quote.css('h2.post-title > a::text').extract_first(),
'teks': quote.css('div.entrytext > p::text').extract_first(),
'tag': quote.css('div.up-bottom-border > p.postmetadata > a::text').extract(),
}
I have tried the code below, but it couldn't insert any data:
# Insert one crawled item into the `berita` table.
conn = Connection()
mycursor = conn.cursor()
# Parameterized query: the driver escapes the values, preventing SQL injection.
sql = "insert into berita(judul, isi, tag) values(%s, %s, %s)"
item = {
    'judul': quote.css('h2.post-title > a::text').extract_first(),
    'teks': quote.css('div.entrytext > p::text').extract_first(),
    'tag': quote.css('div.up-bottom-border > p.postmetadata > a::text').extract(),
}
# FIX: item['tag'] is a Python list; MySQL cannot bind a list to one %s
# placeholder, which is why nothing was inserted. Join the tags into a
# single comma-separated string before binding.
val = (item['judul'], item['teks'], ', '.join(item['tag']))
mycursor.execute(sql, val)
conn.commit()  # without commit the INSERT is rolled back when the connection closes
Sorry for my bad English; I hope someone experienced with Python can help me.