OiO.lk Community platform!

Oio.lk is an excellent forum for developers, providing a wide range of resources, discussions, and support for those in the developer community. Join oio.lk today to connect with like-minded professionals, share insights, and stay updated on the latest trends and technologies in the development field.
  You need to log in or register to access the solved answers to this problem.
  • You have reached the maximum number of guest views allowed
  • Please register below to remove this limitation

python inserting into Mariadb

  • Thread starter Thread starter Scott Bellefeuille
  • Start date Start date
S

Scott Bellefeuille

Guest
Good Evening techgods!, wondering if you can help little old me :) new to python and i'm learning to scrape from video's i'm able to get a tag of it working as an export to excel, what i want to do now, is have it connect to mariadb, because i'm using a raspberry pi for testing :) it seems to be connecting fine, just does not like something and i'm stumped.

so i take the same code and make a little bit of syntax change and i keep getting the following error.

File "C:/Users/scott/PycharmProjects/untitled/test.py", line 37, in cursor.execute(query) mariadb.ProgrammingError: You have an error in your SQL syntax; check the manual that corresponds to your MariaDB server version for the right syntax to use near '' at line 1

Code:
import requests
from bs4 import BeautifulSoup
import mariadb

# Scrape DC UrbanTurf pipeline listings and insert one row per listing
# into the `Properties` table.
url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing title', 'listing url', 'listing image url', 'location', 'Project type', 'Status', 'Size']

# Connect once, outside the loop, instead of reconnecting per listing.
# Credentials are placeholders exactly as in the original post.
conn = mariadb.connect(host="xxxxxxxx", port=3306, user="xxxxxxxxxx", passwd="xxxxxxxxxxxxx", db="test")
cursor = conn.cursor()

# Original bug: the INSERT string was split across two source lines without
# continuation (SyntaxError in Python; the truncated statement is what
# produced "syntax to use near ''" from MariaDB) and had no VALUES clause.
query = (
    "INSERT INTO Properties"
    "(listing_title, listing_url, listing_image_url, property_location,"
    " property_type, property_status, property_size)"
    " VALUES (%s, %s, %s, %s, %s, %s, %s)"
)

try:
    for item in pipeline_items:
        # title, image url, listing url
        listing_title = item.a['title']
        listing_url = item.a['href']
        listing_image_url = item.a.img['src']
        # Reset per-item fields so a listing that lacks one of the labelled
        # <p> tags doesn't silently reuse the previous listing's value.
        property_location = property_type = property_status = property_size = None
        for p_tag in item.find_all('p'):
            if not p_tag.h2:
                if p_tag.span.text == 'Location:':
                    p_tag.span.extract()
                    property_location = p_tag.text.strip()
                elif p_tag.span.text == 'Project type:':
                    p_tag.span.extract()
                    property_type = p_tag.text.strip()
                elif p_tag.span.text == 'Status:':
                    p_tag.span.extract()
                    property_status = p_tag.text.strip()
                elif p_tag.span.text == 'Size:':
                    p_tag.span.extract()
                    property_size = p_tag.text.strip()
        # Parameter binding: the driver escapes values; never interpolate
        # scraped text into the SQL string.
        cursor.execute(query, (listing_title, listing_url, listing_image_url,
                               property_location, property_type, property_status,
                               property_size))
    conn.commit()
    print("Data inserted")
finally:
    conn.close()

any help is greatly appreciated :)

New code. No more errors, I just can't seem to get the data to import. It creates empty rows. The names of the table columns match those in the current code.

Code:
    import requests
from bs4 import BeautifulSoup
import mariadb

url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing_title', 'listing_url', 'listing_image_url', 'location', 'Project_type', 'Status', 'Sizes']

for item in pipeline_items:
    # title, image url, listing url
    listing_title = item.a['title']
    listing_url = item.a['href']
    listing_image_url = item.a.img['src']
    for p_tag in item.find_all('p'):
        if not p_tag.h2:
            if p_tag.span.text == 'Location:':
                p_tag.span.extract()
                property_location = p_tag.text.strip()
            elif p_tag.span.text == 'Project type:':
                p_tag.span.extract()
                property_type = p_tag.text.strip()
            elif p_tag.span.text == 'Status:':
                p_tag.span.extract()
                property_status = p_tag.text.strip()
            elif p_tag.span.text == 'Size:':
                p_tag.span.extract()
                property_size = p_tag.text.strip()
    conn = mariadb.connect(host="192.168.0.217", port=3306, user="", passwd="", db="test")
    # Create a cursor object using the cursor() method
    cursor = conn.cursor()
    query = "INSERT INTO Properties VALUES(listing_title, listing_url, listing_image_url, property_location, property_type, property_status, property_size) "
    cursor.execute(query)
    conn.commit()
    print("Data inserted")
    conn.close()

Working version, in case someone is looking for something similar :)

Code:
import requests
from bs4 import BeautifulSoup
import mariadb

# Scrape DC UrbanTurf pipeline listings and insert one row per listing
# into the `Properties` table.
conn = mariadb.connect(host="192.168.0.217", port=3306, user="", passwd="", db="test")

url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing_title', 'listing_url', 'listing_image_url', 'location', 'Project_type', 'Status', 'Sizes']

# Hoist the statement out of the loop; %s placeholders are filled by the
# driver, never by string interpolation. property_location was scraped but
# missing from the original INSERT — it is included here.
query = (
    "INSERT INTO Properties"
    "(listing_title, listing_url, listing_image_url, property_location,"
    " property_type, property_status, property_size)"
    " VALUES (%s, %s, %s, %s, %s, %s, %s)"
)

try:
    with conn.cursor() as cursor:
        for item in pipeline_items:
            # title, image url, listing url
            listing_title = item.a['title']
            listing_url = item.a['href']
            listing_image_url = item.a.img['src']
            # Reset per-item fields so a listing missing one of the labelled
            # <p> tags doesn't reuse the previous listing's value.
            property_location = property_type = property_status = property_size = None
            for p_tag in item.find_all('p'):
                if not p_tag.h2:
                    if p_tag.span.text == 'Location:':
                        p_tag.span.extract()
                        property_location = p_tag.text.strip()
                    elif p_tag.span.text == 'Project type:':
                        p_tag.span.extract()
                        property_type = p_tag.text.strip()
                    elif p_tag.span.text == 'Status:':
                        p_tag.span.extract()
                        property_status = p_tag.text.strip()
                    elif p_tag.span.text == 'Size:':
                        p_tag.span.extract()
                        property_size = p_tag.text.strip()
            # Original bug: a fresh conn.cursor() was created here each
            # iteration, shadowing the context-managed cursor and leaking
            # cursors. Reuse the one from the `with` block.
            cursor.execute(query, (listing_title, listing_url, listing_image_url,
                                   property_location, property_type,
                                   property_status, property_size))
    # Original bug: no commit. MariaDB Connector/Python defaults to
    # autocommit=False, so without this the inserts are discarded on close.
    conn.commit()
    print("Data inserted")
finally:
    conn.close()
<p>Good Evening techgods!, wondering if you can help little old me :)
new to python and i'm learning to scrape from video's i'm able to get a tag of it working as an export to excel, what i want to do now, is have it connect to mariadb, because i'm using a raspberry pi for testing :) it seems to be connecting fine, just does not like something and i'm stumped.</p>
<p>so i take the same code and make a little bit of syntax change and i keep getting the following error.</p>
<blockquote>
<p>File "C:/Users/scott/PycharmProjects/untitled/test.py", line 37, in

cursor.execute(query) mariadb.ProgrammingError: You have an error in your SQL syntax; check the manual that corresponds to your MariaDB
server version for the right syntax to use near '' at line 1</p>
</blockquote>
<pre><code>import requests
from bs4 import BeautifulSoup
import mariadb

url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing title', 'listing url', 'listing image url', 'location', 'Project type', 'Status', 'Size']

for item in pipeline_items:
# title, image url, listing url
listing_title = item.a['title']
listing_url = item.a['href']
listing_image_url = item.a.img['src']
for p_tag in item.find_all('p'):
if not p_tag.h2:
if p_tag.span.text == 'Location:':
p_tag.span.extract()
property_location = p_tag.text.strip()
elif p_tag.span.text == 'Project type:':
p_tag.span.extract()
property_type = p_tag.text.strip()
elif p_tag.span.text == 'Status:':
p_tag.span.extract()
property_status = p_tag.text.strip()
elif p_tag.span.text == 'Size:':
p_tag.span.extract()
property_size = p_tag.text.strip()
conn = mariadb.connect(host="xxxxxxxx", port=3306, user="xxxxxxxxxx", passwd="xxxxxxxxxxxxx", db="test")
# Creating a cursor object using the cursor() method
cursor = conn.cursor()
query = "INSERT INTO Properties(listing_title, listing_url, listing_image_url, property_location,
property_type, property_status, property_size)"

cursor.execute(query)
conn.commit()

print("Data inserted")
conn.close()
</code></pre>
<p>any help is greatly appreciated :)</p>
<p>New code.
no more errors, just cant seem to get the data to import.
it creates empty rows.
the names of the table match that in the current code.</p>
<pre><code> import requests
from bs4 import BeautifulSoup
import mariadb

url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing_title', 'listing_url', 'listing_image_url', 'location', 'Project_type', 'Status', 'Sizes']

for item in pipeline_items:
# title, image url, listing url
listing_title = item.a['title']
listing_url = item.a['href']
listing_image_url = item.a.img['src']
for p_tag in item.find_all('p'):
if not p_tag.h2:
if p_tag.span.text == 'Location:':
p_tag.span.extract()
property_location = p_tag.text.strip()
elif p_tag.span.text == 'Project type:':
p_tag.span.extract()
property_type = p_tag.text.strip()
elif p_tag.span.text == 'Status:':
p_tag.span.extract()
property_status = p_tag.text.strip()
elif p_tag.span.text == 'Size:':
p_tag.span.extract()
property_size = p_tag.text.strip()
conn = mariadb.connect(host="192.168.0.217", port=3306, user="", passwd="", db="test")
# Create a cursor object using the cursor() method
cursor = conn.cursor()
query = "INSERT INTO Properties VALUES(listing_title, listing_url, listing_image_url, property_location, property_type, property_status, property_size) "
cursor.execute(query)
conn.commit()
print("Data inserted")
conn.close()
</code></pre>
<p>Working version, in case someone is looking for something similar :)</p>
<pre><code>import requests
from bs4 import BeautifulSoup
import mariadb

conn = mariadb.connect(host="192.168.0.217", port=3306, user="", passwd="", db="test")
# Create a cursor object using the cursor() method

url = 'https://dc.urbanturf.com/pipeline'
response = requests.get(url)
soup = BeautifulSoup(response.content, 'html.parser')
pipeline_items = soup.find_all('div', attrs={'class': 'pipeline-item'})

rows = []
columns = ['listing_title', 'listing_url', 'listing_image_url', 'location', 'Project_type', 'Status', 'Sizes']


try:
with conn.cursor() as cursor:
for item in pipeline_items:
# title, image url, listing url
listing_title = item.a['title']
listing_url = item.a['href']
listing_image_url = item.a.img['src']
for p_tag in item.find_all('p'):
if not p_tag.h2:
if p_tag.span.text == 'Location:':
p_tag.span.extract()
property_location = p_tag.text.strip()
elif p_tag.span.text == 'Project type:':
p_tag.span.extract()
property_type = p_tag.text.strip()
elif p_tag.span.text == 'Status:':
p_tag.span.extract()
property_status = p_tag.text.strip()
elif p_tag.span.text == 'Size:':
p_tag.span.extract()
property_size = p_tag.text.strip()
cursor = conn.cursor()
query = "INSERT INTO Properties(listing_title, listing_url, listing_image_url, property_type, property_status, property_size) VALUES(%s,%s,%s,%s,%s,%s)"
value_tuple = (listing_title, listing_url, listing_image_url, property_type, property_status, property_size)
cursor.execute(query, value_tuple)
finally:
conn.close()
</code></pre>
 

Latest posts

A
Replies
0
Views
1
Aarif Hussain A Nassar
A
F
Replies
0
Views
1
Fahmi Nur Fachrurozi
F
Top