
python - Partition key not working when inserting into Azure Cosmos DB


I am trying to insert documents into an Azure Cosmos DB collection using a stored procedure. Earlier I was not using a partition key and this code worked fine, but I then learned that a partition key is required for delete/update operations, so I passed one as well. Now, when I run the script, it gives the following error:

Requests originating from scripts cannot reference partition keys other than the one for which client request was submitted.

Here is the code:

import azure.cosmos.cosmos_client as cosmos_client
import azure.cosmos.documents as documents
import pandas as pd
import numpy as np
import json
import time

config = {
    'ENDPOINT': 'PUT_YOUR_ENDPOINT',
    'PRIMARYKEY': 'PRIMARYKEY',
    'DATABASE': 'DB_NAME'
}

# Initialize the Cosmos client
client = cosmos_client.CosmosClient(url_connection=config['ENDPOINT'],
                                    auth={'masterKey': config['PRIMARYKEY']})

# Reuse the database if it already exists, otherwise create it
try:
    db_query = "select * from r where r.id = '{0}'".format(config['DATABASE'])
    db = list(client.QueryDatabases(db_query))[0]
    db_link = db['_self']
except Exception:
    db = client.CreateDatabase({'id': config['DATABASE']}, options={"offerThroughput": 10000})
    db_link = db['_self']

def get_data_from_collection(filter, collection_id, only_country_level=False):
    coll_query = "select * from r where r.id = '{0}'".format(collection_id)
    coll = list(client.QueryContainers(db_link, coll_query))[0]
    coll_link = coll['_self']

    # Temporarily scale the collection's throughput up for the read
    offer = list(client.QueryOffers("SELECT * FROM c WHERE c.resource = '{0}'".format(coll_link)))[0]
    offer['content']['offerThroughput'] = 10000
    offer = client.ReplaceOffer(offer['_self'], offer)
    print('Replaced Offer. Offer Throughput is now \'{0}\''.format(offer['content']['offerThroughput']))

    data = []
    query = 'select value f.data from collection f'
    docs = client.QueryItems(coll_link, query, {'enableCrossPartitionQuery': True})
    for i in docs:
        data.extend(np.array(i).flatten())

    # Scale the throughput back down once the read is done
    offer = list(client.QueryOffers("SELECT * FROM c WHERE c.resource = '{0}'".format(coll_link)))[0]
    offer['content']['offerThroughput'] = 400
    offer = client.ReplaceOffer(offer['_self'], offer)
    print('Replaced Offer. Offer Throughput is now \'{0}\''.format(offer['content']['offerThroughput']))

    return np.array(data).flatten()

def store_in_cosmosDB(collection_name, dataframe):
    # Container options
    options = {
        'offerThroughput': 10000
    }

    # Partition the container on /Country
    container_definition = {
        'id': collection_name,
        'partitionKey': {
            'paths': ['/Country'],
            'kind': documents.PartitionKind.Hash
        }
    }

    try:
        # Create the container
        container = client.CreateContainer(db_link, container_definition, options)
        coll_link = container['_self']
    except Exception:
        # The container already exists: look it up and scale its throughput up
        coll_query = "select * from r where r.id = '{0}'".format(collection_name)
        coll = list(client.QueryContainers(db_link, coll_query))[0]
        coll_link = coll['_self']
        offer = list(client.QueryOffers("SELECT * FROM c WHERE c.resource = '{0}'".format(coll_link)))[0]
        offer['content']['offerThroughput'] = 10000
        offer = client.ReplaceOffer(offer['_self'], offer)
        print('Replaced Offer. Offer Throughput is now \'{0}\''.format(offer['content']['offerThroughput']))

    # Stored procedure that parses the payload and creates it as a single document
    sproc = {
        'id': 'storedProcedure',
        'body': (
            'function (data) {'
            '    data = JSON.parse(data);'
            '    var client = getContext().getCollection();'
            '    client.createDocument(client.getSelfLink(), data, {}, function(err, docCreated, options) {'
            '        if (err) { throw new Error("Error while creating document: " + err.message); }'
            '        else { getContext().getResponse().setBody(1); }'
            '    });'
            '}'
        )
    }

    try:
        # Create the stored procedure
        created_sproc = client.CreateStoredProcedure(coll_link, sproc)
        proc_link = created_sproc['_self']
    except Exception:
        # The stored procedure already exists: look it up
        proc_query = "select * from r where r.id = '{0}'".format(sproc['id'])
        proc = list(client.QueryStoredProcedures(coll_link, proc_query))[0]
        proc_link = proc['_self']


    # Replace NaN/inf values so the records serialize cleanly
    dataframe.fillna("(blank)", inplace=True)
    dataframe.replace([np.inf, -np.inf], "(blank)", inplace=True)

    start_time = time.time()
    # Execute the stored procedure with the whole dataframe serialized as one
    # JSON payload, scoped to the partition key value 'India'
    client.ExecuteStoredProcedure(proc_link,
                                  json.dumps(dataframe.to_dict('records'), default=str),
                                  {'partitionKey': 'India'})
    print("--- %s seconds ---" % (time.time() - start_time))

    # Scale the throughput back down once the write is done
    offer = list(client.QueryOffers("SELECT * FROM c WHERE c.resource = '{0}'".format(coll_link)))[0]
    offer['content']['offerThroughput'] = 400
    offer = client.ReplaceOffer(offer['_self'], offer)
    print('Replaced Offer. Offer Throughput is now \'{0}\''.format(offer['content']['offerThroughput']))

data = pd.DataFrame({'Country':['India','India','India'], 'num_Patients':[12,36,100]})

try:
    store_in_cosmosDB("dummy_data", data)
    print('Uploading Done!!!')
except Exception as e:
    raise e

Best Answer

You are getting this error because the partition key value of the document you are trying to create inside the stored procedure is the array ['India','India','India'], whereas the value you specified when executing the stored procedure is 'India'.

The two values must match.
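
A minimal sketch of one way to make them match, assuming the client, proc_link, and dataframe objects from the question's code: send each record individually and pass that record's own Country value as the partitionKey option, so the partition key of the document being created always agrees with the key the request was submitted for.

# Sketch only: assumes `client`, `proc_link`, and `dataframe` from the question.
# Group by the partition key column and execute the sproc once per record, so the
# document's /Country value matches the 'partitionKey' option of the request.
for country, group in dataframe.groupby('Country'):
    for record in group.to_dict('records'):
        client.ExecuteStoredProcedure(proc_link,
                                      json.dumps(record, default=str),
                                      {'partitionKey': country})

Since the stored procedure creates whatever payload it receives as a single document, each call now creates one document whose /Country equals the partition key it was submitted under; batching multiple records per call is still possible, but only within a single partition key value.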

Regarding python - Partition key not working when inserting into Azure Cosmos DB, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/60753466/
