在Python3中,我们可以使用多种库来保存爬取到的数据。以下是一些建议的库和方法:
import sqlite3

# Default database file; each helper accepts an optional db_path override.
DB_PATH = 'example.db'


def _connect(db_path=None):
    """Open a connection to *db_path* (default DB_PATH) and make sure the
    ``data`` table exists. Caller is responsible for closing the connection.
    """
    conn = sqlite3.connect(db_path or DB_PATH)
    conn.execute('''CREATE TABLE IF NOT EXISTS data (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        title TEXT NOT NULL,
        url TEXT NOT NULL)''')
    return conn


def insert_data(title, url, db_path=None):
    """Insert one (title, url) row and commit.

    Opens its own connection so the function works at any time — the
    original version closed a shared module-level connection right after
    the definitions, which made every later call fail.
    """
    conn = _connect(db_path)
    try:
        conn.execute("INSERT INTO data (title, url) VALUES (?, ?)",
                     (title, url))
        conn.commit()
    finally:
        conn.close()


def query_data(db_path=None):
    """Return all rows of the ``data`` table as a list of tuples."""
    conn = _connect(db_path)
    try:
        return conn.execute("SELECT * FROM data").fetchall()
    finally:
        conn.close()


def update_data(id, title, url, db_path=None):
    """Set title and url on the row whose primary key equals *id*."""
    conn = _connect(db_path)
    try:
        conn.execute("UPDATE data SET title = ?, url = ? WHERE id = ?",
                     (title, url, id))
        conn.commit()
    finally:
        conn.close()
import json

# Sample scraped records to persist.
data = [
    {"title": "Title 1", "url": "https://example.com/1"},
    {"title": "Title 2", "url": "https://example.com/2"},
]

# Save as a JSON file. encoding='utf-8' plus ensure_ascii=False keeps
# non-ASCII text (e.g. Chinese titles from a scraper) readable in the
# output instead of \uXXXX escapes; indent=2 makes the file human-friendly.
with open('data.json', 'w', encoding='utf-8') as f:
    json.dump(data, f, ensure_ascii=False, indent=2)
import csv

# Sample scraped records to persist.
data = [
    {"title": "Title 1", "url": "https://example.com/1"},
    {"title": "Title 2", "url": "https://example.com/2"},
]

# Save as a CSV file: write the header row, then every record in one call.
# newline='' is required by the csv module; utf-8 keeps scraped text intact.
with open('data.csv', 'w', newline='', encoding='utf-8') as out_file:
    csv_writer = csv.DictWriter(out_file, fieldnames=["title", "url"])
    csv_writer.writeheader()
    csv_writer.writerows(data)
首先安装 pymongo(`pip install pymongo`),然后连接到 MongoDB 数据库,即可插入、查询和更新数据:
from pymongo import MongoClient
# Module-level client: MongoClient maintains a connection pool and is
# meant to be created once and reused. It must NOT be closed before the
# helper functions are used — the original closed it at import time,
# which made every later call fail.
client = MongoClient('mongodb://localhost:27017/')
db = client['example_db']
collection = db['data']


def insert_data(title, url):
    """Insert one document with the given title and url."""
    collection.insert_one({"title": title, "url": url})


def query_data():
    """Return a cursor over all documents in the collection."""
    return collection.find()


def update_data(id, title, url):
    """Set title and url on the document whose ``_id`` equals *id*."""
    collection.update_one({"_id": id}, {"$set": {"title": title, "url": url}})


def close():
    """Close the client once the program is completely done with MongoDB."""
    client.close()
根据你的需求和数据类型,可以选择合适的库和方法来保存爬取到的数据。