gpt4 book ai didi

Python 数据从 Beautiful Soup Table 插入到 MYSQL

转载 作者:行者123 更新时间:2023-11-29 03:00:36 25 4
gpt4 key购买 nike

我正在尝试使用用 Python 编写的 Beautiful Soup 将数据从 HTML 表插入到 MySQL 表中。目前我可以通过创建一个临时 CSV 文件来做到这一点,但它似乎在 Linux 中不起作用,而且效率也很低。下面是我使用 CSV 的代码,这是我想删除的部分。

import urllib2, base64, csv
from bs4 import BeautifulSoup
request = urllib2.Request("http://website.com")
base64string = base64.encodestring('%s:%s' % ('username','password')).replace('\n','')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
soup = BeautifulSoup(result.read())

table=soup.findAll('table')[6]

f = open('output.csv', 'w')

for row in table.findAll('tr'):
cells = row.findAll('td')
#For each "tr", assign each "td" to a variable.
if len(cells) == 19:
column1 = cells[0].find(text=True)
column2 = cells[1].find(text=True)
column3 = cells[2].find(text=True)
column4 = cells[3].find(text=True)
column5 = cells[4].find(text=True)
column6 = cells[5].find(text=True)
column7 = cells[6].find(text=True)
column8 = cells[7].find(text=True)
column9 = cells[8].find(text=True)
column10 = cells[9].find(text=True)
column11 = cells[10].find(text=True)
column12 = cells[11].find(text=True)
column13 = cells[12].find(text=True)
column14 = cells[13].find(text=True)
column15 = cells[14].find(text=True)
column16 = cells[15].find(text=True)
column17 = cells[16].find(text=True)
column18 = cells[17].find(text=True)
column19 = cells[18].find(text=True)
TOTAL_AD_CALLS = column6.replace(',','')
TOTAL_US_AD_CALLS = column7.replace(',','')
TOTAL_NON_US_AD_CALLS = column8.replace(',','')
TOTAL_NON_CLEAN_US_AD_CALLS = column10.replace(',','')
TOTAL_CLEAN_US_AD_CALLS = column12.replace(',','')
UNUSABLE_AD_CALLS = column13.replace(',','')
NO_ADS_RETURNED = column14.replace(',','')
PSAS_RETURNED = column15.replace(',','')
ADS_RETURNED = column16.replace(',','')
TOTAL_RETURNED = column17.replace(',','')
TOTAL_IMPRESSIONS = column18.replace(',','')
Engagements = column19.replace(',','')
NOT_US_RATE = column9.replace('%','')
NOT_CLEAN_RATE = column11.replace('%','')



#district can be a list of lists, so we want to iterate through the top level lists first...

write_to_file = column1 + "," + column2 + "," + column3 + "," + column4 + "," + column5 + "," + TOTAL_AD_CALLS + "," + TOTAL_US_AD_CALLS + "," + TOTAL_NON_US_AD_CALLS + "," + NOT_US_RATE + "," + TOTAL_NON_CLEAN_US_AD_CALLS + "," + NOT_CLEAN_RATE + "," + TOTAL_CLEAN_US_AD_CALLS + "," + UNUSABLE_AD_CALLS + "," + NO_ADS_RETURNED + "," + PSAS_RETURNED + "," + ADS_RETURNED + "," + TOTAL_RETURNED + "," + TOTAL_IMPRESSIONS + "," + Engagements + "\n"
print write_to_file
f.write(write_to_file)
f.close()



import MySQLdb
import os
import string

# Bulk-load output.csv (produced by the scraping step above) into the
# PYTHON_TEST table.
db = MySQLdb.connect(host="ipadress",  # your host, usually localhost
                     user="admin",     # your username
                     passwd="other",   # your password
                     db="dailies")     # name of the database

cursor = db.cursor()

# LOCAL INFILE reads the file from the client side. Raw string so MySQL
# receives the literal escape sequence \r\n (which it interprets itself)
# rather than real CR/LF bytes embedded in the statement. The stray
# double semicolon from the original is gone.
sql = r"""LOAD DATA LOCAL INFILE 'output.csv'
INTO TABLE PYTHON_TEST
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\r\n'
IGNORE 0 LINES"""

try:
    cursor.execute(sql)
    db.commit()
except MySQLdb.MySQLError:
    # Undo any partial load, but re-raise so the failure is visible
    # instead of being silently swallowed by a bare except.
    db.rollback()
    raise
finally:
    # Always disconnect, even when the load fails.
    db.close()

最佳答案

我想通了。如果其他人遇到类似问题,希望这会有所帮助。

import urllib2, base64, csv
from bs4 import BeautifulSoup
import MySQLdb
import os
import string
import datetime

request = urllib2.Request("website.com")
base64string = base64.encodestring('%s:%s' % ('username','password')).replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
result = urllib2.urlopen(request)
soup = BeautifulSoup(result.read())

#print(soup.prettify())
#table=soup.find('table', {"class":"resultsTable,ruler,sortable"})[0]


# Open database connection
db = MySQLdb.connect(host="ipaddress", # your host, usually localhost
user="username", # your username
passwd="password", # your password
db="databsae") # name of the data base

cursor=db.cursor()

table=soup.findAll('table')[6]

#print table

for row in table.findAll('tr'):
cells = row.findAll('td')
#For each "tr", assign each "td" to a variable.
if len(cells) == 19:
column1 = cells[0].find(text=True)
column2 = cells[1].find(text=True)
column3 = cells[2].find(text=True)
column4 = cells[3].find(text=True)
column5 = cells[4].find(text=True)
column6 = cells[5].find(text=True)
column7 = cells[6].find(text=True)
column8 = cells[7].find(text=True)
column9 = cells[8].find(text=True)
column10 = cells[9].find(text=True)
column11 = cells[10].find(text=True)
column12 = cells[11].find(text=True)
column13 = cells[12].find(text=True)
column14 = cells[13].find(text=True)
column15 = cells[14].find(text=True)
column16 = cells[15].find(text=True)
column17 = cells[16].find(text=True)
column18 = cells[17].find(text=True)
column19 = cells[18].find(text=True)
TOTAL_AD_CALLS = column6.replace(',','')
TOTAL_US_AD_CALLS = column7.replace(',','')
TOTAL_NON_US_AD_CALLS = column8.replace(',','')
TOTAL_NON_CLEAN_US_AD_CALLS = column10.replace(',','')
TOTAL_CLEAN_US_AD_CALLS = column12.replace(',','')
UNUSABLE_AD_CALLS = column13.replace(',','')
NO_ADS_RETURNED = column14.replace(',','')
PSAS_RETURNED = column15.replace(',','')
ADS_RETURNED = column16.replace(',','')
TOTAL_RETURNED = column17.replace(',','')
TOTAL_IMPRESSIONS = column18.replace(',','')
Engagements = column19.replace(',','')
NOT_US_RATE = column9.replace('%','')
NOT_CLEAN_RATE = column11.replace('%','')
Created = datetime.datetime.now()

print Engagements


cursor.execute ("INSERT INTO PYTHON_TEST (REPORT_TYPE, THE_DATE, PARENT_ID, SITE_ID,SITE_NAME, TOTAL_AD_CALLS, TOTAL_US_AD_CALLS, TOTAL_NON_US_AD_CALLS, NOT_US_RATE, TOTAL_NON_CLEAN_US_AD_CALLS, NOT_CLEAN_RATE, TOTAL_CLEAN_US_AD_CALLS, UNUSABLE_AD_CALLS, NO_ADS_RETURNED, PSAS_RETURNED, ADS_RETURNED, TOTAL_RETURNED, TOTAL_IMPRESSIONS, ENGAGEMENTS, CREATED) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);", (column1, column2, column3, column4, column5, TOTAL_AD_CALLS, TOTAL_US_AD_CALLS, TOTAL_NON_US_AD_CALLS, NOT_US_RATE ,TOTAL_NON_CLEAN_US_AD_CALLS, NOT_CLEAN_RATE, TOTAL_CLEAN_US_AD_CALLS, UNUSABLE_AD_CALLS, NO_ADS_RETURNED, PSAS_RETURNED, ADS_RETURNED, TOTAL_RETURNED, TOTAL_IMPRESSIONS, Engagements, Created))

#cursor.execute ("INSERT INTO PYTHON_TEST (ENGAGEMENTS,434 ) VALUES (%s);", (Engagements))
db.commit()
'''
cursor.execute ("INSERT INTO PYTHON_TEST VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);", (column1, column2, column3, column4, column5, TOTAL_AD_CALLS, TOTAL_US_AD_CALLS, TOTAL_NON_US_AD_CALLS, NOT_US_RATE ,TOTAL_NON_CLEAN_US_AD_CALLS, NOT_CLEAN_RATE, TOTAL_CLEAN_US_AD_CALLS, UNUSABLE_AD_CALLS, NO_ADS_RETURNED, PSAS_RETURNED, ADS_RETURNED, TOTAL_RETURNED, TOTAL_IMPRESSIONS, Engagements, Created, Created))'''


# disconnect from server
db.close()

关于Python 数据从 Beautiful Soup Table 插入到 MYSQL,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/24286700/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com