gpt4 book ai didi

python - 尝试收集API数据并发送到本地mysql

转载 作者:行者123 更新时间:2023-11-29 15:46:17 24 4
gpt4 key购买 nike

我无法让我创建的这个脚本正常工作。它需要收集 API 数据(返回 JSON),我想把其中的特定字段保存到 MySQL。

尝试运行了代码但没有成功……出现各种 “expected an indented block(预期缩进块)” 错误。

from __future__ import print_function
import requests
import re
import MySQLdb
import json

# Fetch the top-headlines feed once at import time; the response body is JSON.
NEWS_ENDPOINT = 'https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx'
data = requests.get(NEWS_ENDPOINT)

# MySQL connection settings.
# NOTE(review): credentials are hard-coded -- move to config/env in real use.
HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    """Insert one article row into the MySQL table.

    Opens a fresh connection per call, inserts the eight column values via
    parameter binding, commits, and always closes the connection.

    Raises:
        MySQLdb.Error: on connection or query failure.
    """
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    try:
        cursor = db.cursor()
        # Two fixes vs. the original:
        #  * `table` is a reserved word in SQL -- it must be backtick-quoted
        #    (or the table renamed) or MySQL rejects the statement.
        #  * MySQLdb.escape_string() must NOT be applied to the query template:
        #    parameter binding already escapes the values, and escaping the
        #    template can corrupt the %s placeholders.
        insert_query = (
            "INSERT INTO `table` (articles, source, auther, title, description, url, timestamp, content) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
        )
        cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
        db.commit()
        cursor.close()
    finally:
        # Ensure the connection is released even if the insert fails.
        db.close()

# db = MySQLdb.connect(host = HOST, user = USER, passwd = PASSWD, db = DATABASE, charset = "utf8")# cursor = db.cursor()

def on_data(self, data):
    """Parse a NewsAPI JSON payload and store every article via store_data().

    Args:
        self: unused -- signature kept from the stream-listener code this was
              copied from (the original comment mentions tweets/mongoDB).
        data: raw JSON string as returned by the NewsAPI endpoint.

    Errors are caught and printed rather than propagated.
    """
    try:
        datajson = json.loads(data)
        # 'articles' is a LIST of article dicts. The original code indexed it
        # with string keys (datajson['articles']['title']), which raises
        # TypeError -- iterate over the list instead.
        articles = datajson['articles']
        for article in articles:
            source = article['source']['name']
            # NewsAPI spells the field 'author'; the original 'auther' key
            # would raise KeyError. .get() also tolerates null authors.
            auther = article.get('author')
            title = article['title']
            description = article['description']
            url = article['url']
            # NOTE(review): `parser` must come from `from dateutil import parser`,
            # which this script never imports -- add it or this line NameErrors.
            timestamp = parser.parse(article['publishedAt'])
            content = article['content']

            # Insert the row into the MySQL database.
            store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)

我希望将输出存储到 MySQL 表中,但尝试运行脚本时出现错误。我还需要让它持续运行,直到进程/会话被终止。

最佳答案

from __future__ import print_function
import requests
import MySQLdb
from dateutil import parser

# MySQL connection settings for the local news database.
# NOTE(review): credentials hard-coded -- move to config/env in real use.
HOST = "localhost"
USER = "root"
PASSWD = "ssss!"
DATABASE = "sss"

def store_data(articles):
    """Insert a single value into the usa_news.articles column.

    Opens a fresh connection per call, inserts via parameter binding,
    commits, and always closes the connection.

    Args:
        articles: value to store (a source id string in this script).

    Raises:
        MySQLdb.Error: on connection or query failure.
    """
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    try:
        cursor = db.cursor()
        # Parameter binding escapes the value; MySQLdb.escape_string() on the
        # query TEMPLATE is wrong (it can corrupt the %s placeholder), so it
        # has been removed.
        insert_query = "INSERT INTO usa_news (articles) VALUES (%s)"
        cursor.execute(insert_query, (articles,))
        db.commit()
        cursor.close()
    finally:
        # Ensure the connection is released even if the insert fails.
        db.close()


# NewsAPI endpoint and request parameters.
URL = "https://newsapi.org/v2/sources?apiKey=ssssssssss"
country = "us"
PARAMS = {'country': country}

# Issue the GET request and decode the JSON body.
r = requests.get(url=URL, params=PARAMS)
data = r.json()

# Take the id of the first source in the response.
articles = data['sources'][0]['id']

# Show what is about to be stored.
print("article name:%s" % (articles))

# Persist it to the MySQL database.
store_data(articles)

终于成功了!

关于python - 尝试收集API数据并发送到本地mysql,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/57007019/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com