
java - "Cached Item Was Locked" causing select statements in Hibernate


I can't get caching to work with Hibernate the way I want. I've put together some sample code that reproduces the problem I'm running into.

I have an object that contains instances of itself. For example, a part is made up of other parts.

When an updated object comes in, I really need to minimize the select statements Hibernate issues. Looking at the logs, I see this output leading up to the SELECT statement:

Cached item was locked: com.cache.dataobject.Part.parts#1

What can I change in my annotation mappings, XML files, cache provider, or logic to keep the cached item from being locked? I would really like to get rid of that select statement.

I've included the entity/data object, the code I'm testing with, and the log output.

Hibernate version: 3.4

EhCache version: 1.2.3 (bundled with the Hibernate download)

Part data object:

package com.cache.dataobject;

import java.io.Serializable;
import java.lang.String;
import java.util.List;

import javax.persistence.*;

import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;

import static javax.persistence.CascadeType.ALL;

/**
 * Entity implementation class for Entity: Part
 */
@Entity
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class Part implements Serializable {

    private int id;
    private String name;
    private static final long serialVersionUID = 1L;
    private Part mainPart;
    private List<Part> parts;

    public Part() {
        super();
    }

    @Id
    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    @Column(name = "PART_NAME")
    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @ManyToOne(cascade = ALL)
    public Part getMainPart() {
        return mainPart;
    }

    public void setMainPart(Part mainPart) {
        this.mainPart = mainPart;
    }

    @OneToMany(cascade = ALL)
    @JoinColumn(name = "mainPart_id", referencedColumnName = "id")
    @Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    public List<Part> getParts() {
        return parts;
    }

    public void setParts(List<Part> parts) {
        this.parts = parts;
    }

}

Cache DAO:

package com.cache.dao;

import java.util.List;

import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;

import com.cache.dataobject.Part;

/**
 * Session Bean implementation class CacheDao
 */
@Stateless(mappedName = "ejb/CacheDao")
public class CacheDao implements CacheDaoRemote {

    @PersistenceContext(unitName = "CacheProjectUnit")
    EntityManager em;

    /**
     * Default constructor.
     */
    public CacheDao() {
        // TODO Auto-generated constructor stub
    }

    public Part addPart(Part part) {
        System.out.println("CALLED PERSIST");
        em.persist(part);
        return part;
    }

    public Part updatePart(Part part) {
        System.out.println("CALLED MERGE");
        em.merge(part);
        return part;
    }

}

Test client code:

package com.cache.dao;

import java.util.ArrayList;
import java.util.List;

import javax.naming.InitialContext;
import javax.naming.NamingException;

import com.cache.dao.CacheDaoRemote;
import com.cache.dataobject.Part;


public class test {

    /**
     * @param args
     */
    public static void main(String[] args) {
        InitialContext ctx;
        try {
            ctx = new InitialContext();
            CacheDaoRemote dao = (CacheDaoRemote) ctx.lookup("ejb/CacheDao");

            Part computer = new Part();
            computer.setId(1);
            computer.setName("Computer");

            List<Part> parts = new ArrayList<Part>();

            Part cpu = new Part();
            cpu.setId(2);
            cpu.setName("CPU");

            Part monitor = new Part();
            monitor.setId(3);
            monitor.setName("Monitor");

            parts.add(cpu);
            parts.add(monitor);

            computer.setParts(parts);

            dao.addPart(computer);

            computer.setName("DellComputer");

            dao.updatePart(computer);

        } catch (NamingException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

}

Persistence.xml

<?xml version="1.0" encoding="UTF-8"?>
<persistence version="1.0"
    xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd">
    <persistence-unit name="CacheProjectUnit">
        <provider>org.hibernate.ejb.HibernatePersistence</provider>
        <!-- JNDI name of the database resource to use -->
        <jta-data-source>jdbc/H2Pool</jta-data-source>
        <properties>
            <!-- The database dialect to use -->
            <property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect" />
            <!-- drop and create tables at deployment -->
            <property name="hibernate.hbm2ddl.auto" value="create-drop" />
            <property name="hibernate.max_fetch_depth" value="3" />
            <property name="hibernate.cache.provider_class" value="org.hibernate.cache.EhCacheProvider" />
        </properties>
    </persistence-unit>
</persistence>

EhCache.xml

<ehcache>
    <diskStore path="java.io.tmpdir"/>

    <defaultCache
        maxElementsInMemory="10000"
        eternal="false"
        timeToIdleSeconds="120"
        timeToLiveSeconds="120"
        overflowToDisk="true"
        diskPersistent="false"
        diskExpiryThreadIntervalSeconds="120"
        memoryStoreEvictionPolicy="LRU"
    />

    <cache name="com.cache.dataobject.Part"
        maxElementsInMemory="100000"
        eternal="true"
        diskPersistent="false"
        timeToIdleSeconds="0"
        timeToLiveSeconds="0"
    />

    <cache name="com.cache.dataobject.Part.parts"
        maxElementsInMemory="100000"
        eternal="true"
        diskPersistent="false"
        timeToIdleSeconds="0"
        timeToLiveSeconds="0"
    />

</ehcache>

Output log:

INFO: CALLED PERSIST
FINEST: Cache lookup: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINE: Element for com.cache.dataobject.Part#1 is null
FINEST: Cache miss: com.cache.dataobject.Part#1
FINEST: Cache lookup: com.cache.dataobject.Part#2
FINE: key: com.cache.dataobject.Part#2
FINE: Element for com.cache.dataobject.Part#2 is null
FINEST: Cache miss: com.cache.dataobject.Part#2
FINEST: Cache lookup: com.cache.dataobject.Part#3
FINE: key: com.cache.dataobject.Part#3
FINE: Element for com.cache.dataobject.Part#3 is null
FINEST: Cache miss: com.cache.dataobject.Part#3
FINEST: Invalidating: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1
FINE: Element for com.cache.dataobject.Part.parts#1 is null
FINE: insert into Part (mainPart_id, PART_NAME, id) values (?, ?, ?)
FINE: insert into Part (mainPart_id, PART_NAME, id) values (?, ?, ?)
FINE: insert into Part (mainPart_id, PART_NAME, id) values (?, ?, ?)
FINE: update Part set mainPart_id=? where id=?
FINE: update Part set mainPart_id=? where id=?
FINEST: Inserting: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINE: Element for com.cache.dataobject.Part#1 is null
FINEST: Inserted: com.cache.dataobject.Part#1
FINEST: Inserting: com.cache.dataobject.Part#2
FINE: key: com.cache.dataobject.Part#2
FINE: Element for com.cache.dataobject.Part#2 is null
FINEST: Inserted: com.cache.dataobject.Part#2
FINEST: Inserting: com.cache.dataobject.Part#3
FINE: key: com.cache.dataobject.Part#3
FINE: Element for com.cache.dataobject.Part#3 is null
FINEST: Inserted: com.cache.dataobject.Part#3
FINEST: Releasing: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1

INFO: CALLED MERGE
FINEST: Cache lookup: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINEST: Cache hit: com.cache.dataobject.Part#1
FINEST: Cache lookup: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINEST: Cache hit: com.cache.dataobject.Part#1
FINEST: Cache lookup: com.cache.dataobject.Part#2
FINE: key: com.cache.dataobject.Part#2
FINEST: Cache hit: com.cache.dataobject.Part#2
FINEST: Cache lookup: com.cache.dataobject.Part#2
FINE: key: com.cache.dataobject.Part#2
FINEST: Cache hit: com.cache.dataobject.Part#2
FINEST: Cache lookup: com.cache.dataobject.Part#3
FINE: key: com.cache.dataobject.Part#3
FINEST: Cache hit: com.cache.dataobject.Part#3
FINEST: Cache lookup: com.cache.dataobject.Part#3
FINE: key: com.cache.dataobject.Part#3
FINEST: Cache hit: com.cache.dataobject.Part#3
FINEST: Cache lookup: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1
FINEST: Cached item was locked: com.cache.dataobject.Part.parts#1
FINE: select parts0_.mainPart_id as mainPart3_1_, parts0_.id as id1_, parts0_.id as id18_0_, parts0_.mainPart_id as mainPart3_18_0_, parts0_.PART_NAME as PART2_18_0_ from Part parts0_ where parts0_.mainPart_id=?
FINEST: Caching: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1
FINEST: Cached: com.cache.dataobject.Part.parts#1
FINEST: Invalidating: com.cache.dataobject.Part.parts#2
FINE: key: com.cache.dataobject.Part.parts#2
FINE: Element for com.cache.dataobject.Part.parts#2 is null
FINEST: Invalidating: com.cache.dataobject.Part.parts#3
FINE: key: com.cache.dataobject.Part.parts#3
FINE: Element for com.cache.dataobject.Part.parts#3 is null
FINEST: Invalidating: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1
FINEST: Invalidating: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINE: update Part set mainPart_id=?, PART_NAME=? where id=?
FINE: update Part set mainPart_id=null where mainPart_id=?
FINE: update Part set mainPart_id=null where mainPart_id=?
FINEST: Updating: com.cache.dataobject.Part#1
FINE: key: com.cache.dataobject.Part#1
FINEST: Updated: com.cache.dataobject.Part#1
FINEST: Releasing: com.cache.dataobject.Part.parts#2
FINE: key: com.cache.dataobject.Part.parts#2
FINEST: Releasing: com.cache.dataobject.Part.parts#3
FINE: key: com.cache.dataobject.Part.parts#3
FINEST: Releasing: com.cache.dataobject.Part.parts#1
FINE: key: com.cache.dataobject.Part.parts#1

Best Answer

You may have already seen this, but there is an open Hibernate bug that appears to be related to your problem: "2nd level cached collections are locked causing a cache miss".

According to that bug report, a possible fix is to use a new session for your add/update calls. You could obtain an EntityManagerFactory instead of an EntityManager and request a new entity manager for each call. Obviously, whether that is appropriate depends on the broader context of your code.
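For illustration, here is a minimal sketch of that workaround, assuming the same "CacheProjectUnit" persistence unit and CacheDaoRemote interface shown above; the class name and mapped name (CacheDaoPerCall) are hypothetical, and whether an application-managed EntityManager fits your transaction setup depends on your container configuration:

// Hypothetical variant of the DAO above: inject the EntityManagerFactory
// (via @PersistenceUnit) instead of a container-managed EntityManager,
// and open a fresh, application-managed EntityManager for each call.
package com.cache.dao;

import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceUnit;

import com.cache.dataobject.Part;

@Stateless(mappedName = "ejb/CacheDaoPerCall")
public class CacheDaoPerCall implements CacheDaoRemote {

    @PersistenceUnit(unitName = "CacheProjectUnit")
    EntityManagerFactory emf;

    public Part addPart(Part part) {
        EntityManager em = emf.createEntityManager();
        try {
            em.joinTransaction(); // join the bean's JTA transaction
            em.persist(part);
            return part;
        } finally {
            em.close(); // discard the per-call persistence context
        }
    }

    public Part updatePart(Part part) {
        EntityManager em = emf.createEntityManager();
        try {
            em.joinTransaction();
            em.merge(part);
            return part;
        } finally {
            em.close();
        }
    }

}

The point is simply that each add/update runs against a short-lived persistence context rather than one shared, container-managed EntityManager; how well this interacts with the second-level collection cache still depends on the Hibernate bug referenced above.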

For this question about java - "Cached Item Was Locked" causing select statements in Hibernate, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/643700/
