stopping point
This commit is contained in:
parent 54caf644a2
commit f2d926a3f8
26 changed files with 943 additions and 220 deletions
BaseIntegrationTest.java (new file, 26 lines)
@@ -0,0 +1,26 @@
package com.example.crud;

import example.domain.Event;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cassandra.core.cql.CqlIdentifier;
import org.springframework.data.cassandra.core.CassandraAdminOperations;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.HashMap;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = CassandraConfiguration.class)
public abstract class BaseIntegrationTest {

    @Autowired
    private CassandraAdminOperations adminTemplate;

    @Before
    public void resetKeySpace() {
        adminTemplate.dropTable(CqlIdentifier.cqlId("event"));
        adminTemplate.createTable(true, CqlIdentifier.cqlId("event"), Event.class, new HashMap<String, Object>());
    }
}

CRUDTest.java (new file, 219 lines)
@@ -0,0 +1,219 @@
package com.example.crud.entities;

import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
// the committed file imported lombok.extern.java.Log, but the log field below uses commons-logging;
// the two logging imports and the Kundera CassandraConstants import below are assumed corrections
import com.impetus.client.cassandra.common.CassandraConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.*;

import javax.persistence.*;
import java.util.HashMap;
import java.util.Map;

/**
 * The Class CRUDTest.
 */
public class CRUDTest
{
    private Log log = LogFactory.getLog(getClass().getName());

    /** The Constant PU. */
    private static final String PU = "cassandra_pu";

    /** The emf. */
    private static EntityManagerFactory emf;

    /** The em. */
    private EntityManager em;

    /**
     * Sets the up before class.
     *
     * @throws Exception the exception
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        Map<String, Object> propertyMap = new HashMap<String, Object>();
        //propertyMap.put(PersistenceProperties.KUNDERA_DDL_AUTO_PREPARE, "create");
        propertyMap.put("kundera.batch.size", "5");
        propertyMap.put(CassandraConstants.CQL_VERSION, CassandraConstants.CQL_VERSION_3_0);
        emf = Persistence.createEntityManagerFactory(PU, propertyMap);
    }

    /**
     * Sets the up.
     *
     * @throws Exception the exception
     */
    @Before
    public void setUp() throws Exception
    {
        em = emf.createEntityManager();
    }

    /**
     * Test crud operations.
     *
     * @throws Exception the exception
     */
    @Test
    public void testCRUDOperations() throws Exception
    {
        testInsert();
        testMerge();
        testUpdate();
        testCache();
        testTransaction();
        testBatch();
        testQueryBuilder();
        testRemove();
    }

    /**
     * Test insert.
     *
     * @throws Exception the exception
     */
    private void testInsert() throws Exception
    {
        Person p = new Person();
        p.setPersonId("101");
        p.setPersonFirstName("James");
        p.setPersonLastName("Bond");
        p.setAge(24);
        //p.addEmail("007@mi6.gov");
        em.persist(p);
        em.flush();

        Person person = em.find(Person.class, "101");
        Assert.assertNotNull(person);
        Assert.assertEquals("101", person.getPersonId());
        Assert.assertEquals("James Bond", person.getPersonName());
    }

    /**
     * Test merge.
     */
    private void testMerge()
    {
        Person person = em.find(Person.class, "101");
        person.setPersonLastName("Blond");
        //person.addEmail("jamesbond@gmail.com");
        person = em.merge(person);
        em.flush();

        Person p2 = em.find(Person.class, "101");
        Assert.assertEquals("Blond", p2.getPersonLastName());
    }

    private void testCache() {
        Cache cache = emf.getCache();
        cache.evictAll();
        log.info("Person in Cache: " + cache.contains(Person.class, "101"));
        Person person = em.find(Person.class, "101");
        log.info("Person in Cache: " + cache.contains(Person.class, person.getPersonId()));
        cache.evictAll();
        log.info("Person in Cache: " + cache.contains(Person.class, person.getPersonId()));
    }

    private void testUpdate()
    {
        Person person = em.find(Person.class, "101");
        /*
        // In Query set Parameter.
        queryString = "Update PersonCassandra p SET p.personName = 'Kuldeep' WHERE p.personId IN :idList";

        List<String> id = new ArrayList<String>();
        id.add("1");
        id.add("2");
        id.add("3");

        cqlQuery = parseAndCreateUpdateQuery(kunderaQuery, emf, em, pu, PersonCassandra.class, Integer.MAX_VALUE);
        KunderaQuery kunderaQuery = getQueryObject(queryString, emf);
        kunderaQuery.setParameter("idList", id);

        PersistenceDelegator pd = getPersistenceDelegator(em, getpd);
        EntityManagerFactoryImpl.KunderaMetadata kunderaMetadata = ((EntityManagerFactoryImpl) emf).getKunderaMetadataInstance();

        CassQuery query = new CassQuery(kunderaQuery, pd, kunderaMetadata);
        query.setMaxResults(maxResult);
        if (ttl != null)
        {
            query.applyTTL(ttl);
        }

        String cqlQuery = query.createUpdateQuery(kunderaQuery);
        return cqlQuery;
        */
        person.setPersonFirstName("Jim");
        em.flush();
    }

    private void testTransaction()
    {
        EntityTransaction txn = em.getTransaction();
        txn.begin();
        Person person = new Person();
        person.setPersonFirstName("Fred");
        person.setPersonLastName("Johnson");
        person.setAge(22);
        em.persist(person);
        txn.commit();
    }

    private void testBatch()
    {
    }

    private void testQueryBuilder()
    {
        String table = em.getMetamodel().entity(Person.class).getName();
        Select q = QueryBuilder.select().all().from(table);
        Query query = em.createQuery(q.getQueryString());
        query.getResultList();
    }

    /**
     * Test remove.
     */
    private void testRemove()
    {
        Person p = em.find(Person.class, "101");
        em.remove(p);

        Person p1 = em.find(Person.class, "101");
        Assert.assertNull(p1);
    }

    /**
     * Tear down.
     *
     * @throws Exception the exception
     */
    @After
    public void tearDown() throws Exception
    {
        em.close();
    }

    /**
     * Tear down after class.
     *
     * @throws Exception the exception
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception
    {
        if (emf != null)
        {
            emf.close();
            emf = null;
        }
    }
}

CacheProperties.java (new file, 39 lines)
@@ -0,0 +1,39 @@
package com.example.crud;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

@Component
@PropertySource("application.properties")
public class CacheProperties {

    @Value("${cache.enabled}")
    private boolean enabled;

    @Value("${cache.names}")
    private String[] cacheNames;

    public List<String> getCacheNameList() {
        return Collections.unmodifiableList(Arrays.asList(cacheNames));
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public String[] getCacheNames() {
        return cacheNames;
    }

    public void setCacheNames(String[] cacheNames) {
        this.cacheNames = cacheNames;
    }
}

CassandraConfiguration.java (new file, 43 lines)
@@ -0,0 +1,43 @@
package com.example.crud;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.data.cassandra.config.CassandraClusterFactoryBean;
import org.springframework.data.cassandra.config.java.AbstractCassandraConfiguration;
import org.springframework.data.cassandra.mapping.BasicCassandraMappingContext;
import org.springframework.data.cassandra.mapping.CassandraMappingContext;
import org.springframework.data.cassandra.repository.config.EnableCassandraRepositories;

@Configuration
@PropertySource(value = {"classpath:cassandra.properties"})
@EnableCassandraRepositories(basePackages = {"example"})
public class CassandraConfiguration extends AbstractCassandraConfiguration {

    private static final Logger LOG = LoggerFactory.getLogger(CassandraConfiguration.class);

    @Autowired
    private Environment environment;

    @Bean
    public CassandraClusterFactoryBean cluster() {
        CassandraClusterFactoryBean cluster = new CassandraClusterFactoryBean();
        cluster.setContactPoints(environment.getProperty("cassandra.contactpoints"));
        cluster.setPort(Integer.parseInt(environment.getProperty("cassandra.port")));
        return cluster;
    }

    @Override
    protected String getKeyspaceName() {
        return environment.getProperty("cassandra.keyspace");
    }

    @Bean
    public CassandraMappingContext cassandraMapping() throws ClassNotFoundException {
        return new BasicCassandraMappingContext();
    }
}

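For reference, a minimal cassandra.properties that this configuration class would read could look like the sketch below. The three property keys come straight from the getProperty() calls above; the values are assumptions for a local single-node setup (the actual keyspace name is not shown anywhere in this commit).

    # assumed values for a local Cassandra node; adjust to the real environment
    cassandra.contactpoints=127.0.0.1
    cassandra.port=9042
    cassandra.keyspace=example
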
CassandraTemplateIntegrationTest.java (new file, 42 lines)
@@ -0,0 +1,42 @@
package com.example.crud;

import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.utils.UUIDs;
import com.google.common.collect.ImmutableSet;
import example.domain.Event;
import org.hamcrest.core.IsEqual;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.cassandra.core.CassandraOperations;

import java.util.List;

import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

public class CassandraTemplateIntegrationTest extends BaseIntegrationTest {

    public static final String TIME_BUCKET = "2014-01-01";

    @Autowired
    private CassandraOperations cassandraTemplate;

    @Test
    public void supportsPojoToCqlMappings() {
        Event event = new Event(UUIDs.timeBased(), "type1", TIME_BUCKET, ImmutableSet.of("tag1", "tag3"));
        cassandraTemplate.insert(event);

        Select select = QueryBuilder.select().from("event").where(QueryBuilder.eq("type", "type1")).and(QueryBuilder.eq("bucket", TIME_BUCKET)).limit(10);

        Event retrievedEvent = cassandraTemplate.selectOne(select, Event.class);

        assertThat(retrievedEvent, IsEqual.equalTo(event));

        List<Event> retrievedEvents = cassandraTemplate.select(select, Event.class);

        assertThat(retrievedEvents.size(), is(1));
        assertThat(retrievedEvents, hasItem(event));
    }
}

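The Event entity lives in example.domain and is not part of this commit. The sketch below is a hypothetical reconstruction based only on how the tests use it (the constructor arguments, the queries filtering on "type" and "bucket", and the "event" table recreated in BaseIntegrationTest), using the Spring Data Cassandra 1.x mapping annotations already on the classpath. Field names, key layout, and column ordering are assumptions.

    package example.domain;

    import org.springframework.cassandra.core.PrimaryKeyType;
    import org.springframework.data.cassandra.mapping.Column;
    import org.springframework.data.cassandra.mapping.PrimaryKeyColumn;
    import org.springframework.data.cassandra.mapping.Table;

    import java.util.Set;
    import java.util.UUID;

    @Table("event")
    public class Event {

        // assumed partition key: the tests always filter on type and bucket together
        @PrimaryKeyColumn(name = "type", ordinal = 0, type = PrimaryKeyType.PARTITIONED)
        private String type;

        @PrimaryKeyColumn(name = "bucket", ordinal = 1, type = PrimaryKeyType.PARTITIONED)
        private String bucket;

        // assumed clustering column; the tests create ids with UUIDs.timeBased()
        @PrimaryKeyColumn(name = "id", ordinal = 2, type = PrimaryKeyType.CLUSTERED)
        private UUID id;

        @Column
        private Set<String> tags;

        public Event(UUID id, String type, String bucket, Set<String> tags) {
            this.id = id;
            this.type = type;
            this.bucket = bucket;
            this.tags = tags;
        }

        // equals()/hashCode() over these fields would be needed for the
        // IsEqual/hasItem assertions in the tests above to pass
    }
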
CqlTemplateIntegrationTest.java (new file, 56 lines)
@@ -0,0 +1,56 @@
package com.example.crud;

import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.utils.UUIDs;
import com.google.common.collect.ImmutableSet;
import org.hamcrest.core.Is;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cassandra.core.CqlOperations;

import static org.junit.Assert.assertThat;

public class CqlTemplateIntegrationTest extends BaseIntegrationTest {

    public static final String TIME_BUCKET = "2014-01-01";

    @Autowired
    private CqlOperations cqlTemplate;

    @Test
    public void allowsExecutingCqlStatements() {
        insertEventUsingCqlString();
        insertEventUsingStatementBuildWithQueryBuilder();
        insertEventUsingPreparedStatement();

        ResultSet resultSet1 = cqlTemplate.query("select * from event where type='type2' and bucket='" + TIME_BUCKET + "'");

        assertThat(resultSet1.all().size(), Is.is(2));

        Select select = QueryBuilder.select().from("event").where(QueryBuilder.eq("type", "type1")).and(QueryBuilder.eq("bucket", TIME_BUCKET)).limit(10);
        ResultSet resultSet2 = cqlTemplate.query(select);

        assertThat(resultSet2.all().size(), Is.is(1));
    }

    private void insertEventUsingCqlString() {
        cqlTemplate.execute("insert into event (id, type, bucket, tags) values (" + UUIDs.timeBased() + ", 'type1', '" + TIME_BUCKET + "', {'tag2', 'tag3'})");
    }

    private void insertEventUsingStatementBuildWithQueryBuilder() {
        Insert insertStatement = QueryBuilder.insertInto("event").value("id", UUIDs.timeBased()).value("type", "type2")
                .value("bucket", TIME_BUCKET).value("tags", ImmutableSet.of("tag1"));
        cqlTemplate.execute(insertStatement);
    }

    private void insertEventUsingPreparedStatement() {
        PreparedStatement preparedStatement = cqlTemplate.getSession().prepare("insert into event (id, type, bucket, tags) values (?, ?, ?, ?)");
        Statement insertStatement = preparedStatement.bind(UUIDs.timeBased(), "type2", TIME_BUCKET, ImmutableSet.of("tag1", "tag2"));
        cqlTemplate.execute(insertStatement);
    }
}

README (3 lines changed)
@@ -17,3 +17,6 @@ netstat -nr | grep 239.9.9.9
ping -t 1 -c 2 239.9.9.9
sudo tcpdump -vvv -ni en0 host 239.9.9.9
sudo route -v delete -inet 239.9.9.9

mvn clean versions:use-latest-versions scm:checkin deploy -Dmessage="update versions" -DperformRelease=true
http://www.mojohaus.org/versions-maven-plugin/index.html

RedisCacheConfiguration.java (new file, 37 lines)
@@ -0,0 +1,37 @@
// Note: as committed, this file starts at @Configuration with no package or imports, and the public
// class name (CacheConfig) does not match the file name. The package and imports below are filled in
// as assumptions (Spring core caching plus spring-data-redis 1.x APIs).
package com.example.crud;

import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;

import java.lang.reflect.Method;

@Configuration
@EnableCaching
public class CacheConfig {

    @Bean
    public JedisConnectionFactory redisConnectionFactory() {
        return new JedisConnectionFactory();
    }

    @Bean
    public RedisTemplate<String, String> redisTemplate(RedisConnectionFactory cf) {
        RedisTemplate<String, String> redisTemplate = new RedisTemplate<String, String>();
        redisTemplate.setConnectionFactory(cf);
        return redisTemplate;
    }

    @Bean
    public CacheManager cacheManager(RedisTemplate redisTemplate) {
        return new RedisCacheManager(redisTemplate);
    }

    @Bean
    public KeyGenerator keyGenerator() {
        return new KeyGenerator() {
            @Override
            public Object generate(Object o, Method method, Object... params) {
                StringBuilder sb = new StringBuilder();
                sb.append(o.getClass().getName());
                sb.append(method.getName());
                for (Object param : params) {
                    sb.append(param.toString());
                }
                return sb.toString();
            }
        };
    }
}

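As a usage sketch (not part of this commit), a caching consumer could point at the generator bean above through the keyGenerator attribute of @Cacheable (available since Spring 4.1). The service class and its wiring below are hypothetical; only the cache name "crudCache" (from cache.names in application.properties) and the bean name "keyGenerator" come from this commit.

    package com.example.crud;

    import com.example.crud.entities.Inventory;
    import com.example.crud.repositories.InventoryRepository;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.cache.annotation.Cacheable;
    import org.springframework.stereotype.Service;

    @Service
    public class InventoryLookupService {   // hypothetical class, for illustration only

        @Autowired
        private InventoryRepository inventoryRepository;

        // caches results in "crudCache" using the custom keyGenerator() bean defined above
        @Cacheable(value = "crudCache", keyGenerator = "keyGenerator")
        public Inventory lookupByName(String name) {
            return inventoryRepository.findByName(name);
        }
    }
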
TESTS (new empty file, 0 lines)

application.properties (new file, 5 lines)
@@ -0,0 +1,5 @@
################
# Caching
################
cache.enabled = true
cache.names = crudCache

applicationContext.xml (new file, 31 lines)
@@ -0,0 +1,31 @@
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:metrics="http://www.ryantenney.com/schema/metrics" xmlns:aop="http://www.springframework.org/schema/aop"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.ryantenney.com/schema/metrics
           http://www.ryantenney.com/schema/metrics/metrics.xsd http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd">

    <!-- Creates a MetricRegistry bean -->
    <metrics:metric-registry id="metricRegistry" />

    <!-- Creates a HealthCheckRegistry bean (Optional) -->
    <metrics:health-check-registry id="health" />

    <!-- Registers BeanPostProcessors with Spring which proxy beans and capture metrics -->
    <!-- Include this once per context (once in the parent context and in any subcontexts) -->
    <metrics:annotation-driven metric-registry="metricRegistry" />

    <!-- Example reporter definition. Supported reporters include jmx, slf4j, graphite, and others. -->
    <!-- Reporters should be defined only once, preferably in the parent context -->
    <metrics:reporter type="console" metric-registry="metricRegistry" period="1m" />

    <!-- Register metric beans (Optional) -->
    <!-- The metrics in this example require metrics-jvm -->
    <metrics:register metric-registry="metricRegistry">
        <bean metrics:name="jvm.gc" class="com.codahale.metrics.jvm.GarbageCollectorMetricSet" />
        <bean metrics:name="jvm.memory" class="com.codahale.metrics.jvm.MemoryUsageGaugeSet" />
        <bean metrics:name="jvm.thread-states" class="com.codahale.metrics.jvm.ThreadStatesGaugeSet" />
        <bean metrics:name="jvm.fd.usage" class="com.codahale.metrics.jvm.FileDescriptorRatioGauge" />
    </metrics:register>
</beans>

crud.iml (76 lines changed)
@@ -15,13 +15,22 @@
<facet type="Spring" name="Spring">
  <configuration>
    <fileset id="fileset" name="Spring Application Context" removed="false">
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/ApplicationConfig.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/CacheConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/EhCacheConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/MetricsConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/SpringConfiguringClass.java</file>
      <file>file://$MODULE_DIR$/applicationContext.xml</file>
    </fileset>
    <fileset id="fileset2" name="Spring Application Context (2)" removed="false">
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/ApplicationConfiguration.java</file>
    </fileset>
    <fileset id="fileset3" name="Spring Application Context (3)" removed="false">
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/ApplicationConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/CacheConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/EhCacheConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/MetricsConfiguration.java</file>
      <file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/SpringConfiguringClass.java</file>
    </fileset>
  </configuration>
</facet>
<facet type="AspectJ" name="AspectJ">

@@ -41,16 +50,24 @@
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-core:5.1.0-m3" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-api-jpa:5.1.0-m3" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:javax.persistence:2.1.0" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.0.13" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.0.13" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.5" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:javax.persistence:2.2.0-m3" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.2.3" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.2.3" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.25" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-access:1.2.3" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:jcl-over-slf4j:1.7.25" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:log4j-over-slf4j:1.7.7" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:jcl-over-slf4j:1.8.0-alpha2" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:log4j-over-slf4j:1.8.0-alpha2" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-mongodb:5.1.0-m3" level="project" />
<orderEntry type="library" name="Maven: org.mongodb:mongo-java-driver:3.4.2" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-cassandra:5.1.0-m3" level="project" />
<orderEntry type="module-library">
  <library name="Maven: org.datanucleus:datanucleus-cassandra:5.1.0-m4-SNAPSHOT">
    <CLASSES>
      <root url="jar://$MODULE_DIR$/lib/datanucleus-cassandra-5.1.0-m4-SNAPSHOT.jar!/" />
    </CLASSES>
    <JAVADOC />
    <SOURCES />
  </library>
</orderEntry>
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-core:3.2.0" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-handler:4.0.44.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-buffer:4.0.44.Final" level="project" />

@@ -69,44 +86,45 @@
<orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-extras:3.2.0" level="project" />
<orderEntry type="library" name="Maven: com.datastax.cassandra:cassandra-driver-mapping:3.2.0" level="project" />
<orderEntry type="library" name="Maven: org.hdrhistogram:HdrHistogram:2.1.9" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-cache:5.0.0-release" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-guava:5.0.1" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-jodatime:5.0.0-release" level="project" />
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.0.0" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-cache:5.1.0-m1" level="project" />
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.0.0-PFD" level="project" />
<orderEntry type="library" name="Maven: net.sf.ehcache:ehcache-core:2.6.11" level="project" />
<orderEntry type="library" name="Maven: net.sf.ehcache:ehcache-jgroupsreplication:1.7" level="project" />
<orderEntry type="library" name="Maven: org.jgroups:jgroups:3.1.0.Final" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aop:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-beans:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-expression:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context-support:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-jpa:2.0.0.M4" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-commons:2.0.0.M4" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-keyvalue:2.0.0.M4" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-orm:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jdbc:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-tx:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aop:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-beans:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jcl:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-expression:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context-support:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-jpa:1.6.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-commons:1.8.6.RELEASE" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.6" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-keyvalue:unknown" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-orm:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-jdbc:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-tx:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework.security:spring-security-web:5.0.0.M2" level="project" />
<orderEntry type="library" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" name="Maven: org.springframework.security:spring-security-core:5.0.0.M2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-web:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aspects:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-web:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aspects:5.0.0.RC2" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjweaver:1.9.0.BETA-5" level="project" />
<orderEntry type="library" name="Maven: joda-time:joda-time:2.9.9" level="project" />
<orderEntry type="library" name="Maven: org.projectlombok:lombok:1.16.16" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
<orderEntry type="library" name="Maven: com.codahale.metrics:metrics-annotation:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-logback:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-jvm:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-healthchecks:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-graphite:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-ehcache:3.2.2" level="project" />
<orderEntry type="library" name="Maven: net.sf.ehcache:ehcache:2.8.3" level="project" />
<orderEntry type="library" name="Maven: com.ryantenney.metrics:metrics-spring:3.1.3" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-annotation:3.1.2" level="project" />
<orderEntry type="library" name="Maven: io.riemann:metrics3-riemann-reporter:0.4.5" level="project" />

pom.xml (50 lines changed)
@@ -17,7 +17,7 @@
<org.datanucleus.version>[5.1.0-m3, 5.9)</org.datanucleus.version>
<aspectj.version>1.8.10</aspectj.version>
<metrics.version>3.2.2</metrics.version>
<spring.version>5.0.0.M5</spring.version>
<spring.version>5.0.0.RC2</spring.version>
<spring-data.version>Kay-M4</spring-data.version>
</properties>

@@ -46,7 +46,7 @@
<dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-framework-bom</artifactId>
    <version>${spring.version}</version>
    <version>5.0.0.RC2</version>
    <scope>import</scope>
    <type>pom</type>
</dependency>

@@ -54,7 +54,7 @@
<dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-releasetrain</artifactId>
    <version>${spring-data.version}</version>
    <version>1.4.6.RELEASE</version>
    <scope>import</scope>
    <type>pom</type>
</dependency>

@@ -77,13 +77,13 @@
<dependency>
    <groupId>org.datanucleus</groupId>
    <artifactId>javax.persistence</artifactId>
    <version>2.1.0</version>
    <version>2.2.0-m3</version>
</dependency>

<dependency>
    <groupId>ch.qos.logback</groupId>
    <artifactId>logback-classic</artifactId>
    <version>1.0.13</version>
    <version>1.2.3</version>
</dependency>
<dependency>
    <groupId>ch.qos.logback</groupId>

@@ -93,12 +93,12 @@
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>jcl-over-slf4j</artifactId>
    <version>1.7.25</version>
    <version>1.8.0-alpha2</version>
</dependency>
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>log4j-over-slf4j</artifactId>
    <version>1.7.7</version>
    <version>1.8.0-alpha2</version>
</dependency>

<!-- MongoDB -->

@@ -117,24 +117,41 @@
<dependency>
    <groupId>org.datanucleus</groupId>
    <artifactId>datanucleus-cassandra</artifactId>
    <version>${org.datanucleus.version}</version>
    <!-- version>${org.datanucleus.version}</version -->
    <type>jar</type>
    <scope>system</scope>
    <version>5.1.0-m4-SNAPSHOT</version>
    <systemPath>${project.basedir}/lib/datanucleus-cassandra-5.1.0-m4-SNAPSHOT.jar</systemPath>
</dependency>

<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-core</artifactId>
    <version>3.2.0</version>
</dependency>

<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-extras</artifactId>
    <version>3.2.0</version>
</dependency>

<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-mapping</artifactId>
    <version>3.2.0</version>
</dependency>

<dependency>
    <groupId>org.hdrhistogram</groupId>
    <artifactId>HdrHistogram</artifactId>
    <version>[2.1.8,)</version>
    <version>2.1.9</version>
</dependency>

<dependency>
    <groupId>org.datanucleus</groupId>
    <artifactId>datanucleus-cache</artifactId>
    <version>5.0.0-release</version>
    <version>5.1.0-m1</version>
    <!--
    <exclusions>
        <exclusion>

@@ -145,6 +162,7 @@
    -->
</dependency>

<!--
<dependency>
    <groupId>org.datanucleus</groupId>
    <artifactId>datanucleus-guava</artifactId>

@@ -157,7 +175,6 @@
    <version>5.0.0-release</version>
</dependency>

<!--
<dependency>
    <groupId>org.datanucleus</groupId>
    <artifactId>datanucleus-java8</artifactId>

@@ -168,7 +185,7 @@
<dependency>
    <groupId>javax.cache</groupId>
    <artifactId>cache-api</artifactId>
    <version>1.0.0</version>
    <version>1.0.0-PFD</version>
</dependency>

<!--

@@ -309,6 +326,11 @@
    <version>${metrics.version}</version>
</dependency>

<dependency>
    <groupId>io.dropwizard.metrics</groupId>
    <artifactId>metrics-ehcache</artifactId>
    <version>${metrics.version}</version>
</dependency>

<dependency>
    <groupId>com.ryantenney.metrics</groupId>

@@ -431,7 +453,7 @@
        </dependency>
    </dependencies>
</plugin>
<!-- DataNucleus compile time weaving
<!-- DataNucleus compile time weaving -->
<plugin>
    <groupId>org.datanucleus</groupId>
    <artifactId>datanucleus-maven-plugin</artifactId>

@@ -451,7 +473,7 @@
        </execution>
    </executions>
</plugin>
-->

<!-- -->
<plugin>
    <groupId>org.codehaus.mojo</groupId>

@@ -1,5 +1,6 @@
package com.example.crud;

import com.codahale.metrics.ehcache.InstrumentedEhcache;
import com.google.common.collect.Lists;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

@@ -1,5 +1,9 @@
package com.example.crud;

import com.codahale.metrics.ehcache.InstrumentedEhcache;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.example.crud.entities.*;
import com.example.crud.repositories.InventoryRepository;
import com.example.crud.repositories.PersonRepository;

@@ -16,67 +20,113 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;

// TODO:
// * LOCAL_QUORUM
// * compound primary keys
// * pillar for DDL
// * metrics
// * com.datastax.driver.core.Cluster.builder().withQueryOptions(new QueryOptions().setConsistencyLevel(ConsistencyLevel.QUORUM))
// * https://github.com/brndnmtthws/metrics-cassandra (c* as a sink for metrics)
// * https://github.com/addthis/metrics-reporter-config

/**
 * Controlling application for the DataNucleus Tutorial using JPA.
 * Uses the "persistence-unit" called "Tutorial".
 */
public class Main {
Logger log = LoggerFactory.getLogger(getClass().getName());

EntityManagerFactory cassandraEntityManagerFactory;
EntityManagerFactory mongoEntityManagerFactory;
Cluster cluster;

String personId;

public static void main(String args[]) {
Logger log = LoggerFactory.getLogger(Main.class);//getClass().getName());
Main main = new Main();
}

public Main() {
System.setProperty("DEBUG.MONGO", "true"); // Enable MongoDB logging in general
System.setProperty("DB.TRACE", "true"); // Enable DB operation tracing

AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.scan("com.example");
ctx.register(ApplicationConfiguration.class);
ctx.refresh();

// Enable MongoDB logging in general
System.setProperty("DEBUG.MONGO", "true");

// Enable DB operation tracing
System.setProperty("DB.TRACE", "true");

// Create an EntityManagerFactory for this "persistence-unit"
// See the file "META-INF/persistence.xml"
EntityManagerFactory cassandraEntityManagerFactory = Persistence.createEntityManagerFactory("crud");
EntityManagerFactory mongoEntityManagerFactory = Persistence.createEntityManagerFactory("mongo");

// TODO:
// * LOCAL_QUORUM
// * compound primary keys
// * pillar for DDL
// * metrics
// * com.datastax.driver.core.Cluster.builder().withQueryOptions(new QueryOptions().setConsistencyLevel(ConsistencyLevel.QUORUM))
// * https://github.com/brndnmtthws/metrics-cassandra (c* as a sink for metrics)
// * https://github.com/addthis/metrics-reporter-config

EntityManager em;
EntityTransaction tx;
JpaRepositoryFactory factory;
cassandraEntityManagerFactory = Persistence.createEntityManagerFactory("crud");
mongoEntityManagerFactory = Persistence.createEntityManagerFactory("mongo");

cluster = ctx.getBean(Cluster.class);
/*
Configuration configuration = cluster.getConfiguration();
Metadata metadata = cluster.getMetadata();
log.debug(configuration);
log.debug(metadata); */
/*
Session session = ctx.getBean(Session.class);
Select s = QueryBuilder.select().all().from("kc", "inventory");
s.setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
ResultSet rs = session.execute(s);
log.debug(rs.toString());
*/
/*
//org.datanucleus.api.jpa.JPAEntityTransaction tx = (org.datanucleus.api.jpa.JPAEntityTransaction)pm.currentTransaction();
//tx.setOption("transaction.isolation", 2);

// Add a person to MongoDB
em = mongoEntityManagerFactory.createEntityManager();
Person person;
/*
factory = new JpaRepositoryFactory(em);
PersonRepository repository = factory.getRepository(PersonRepository.class);
person = new Person();
person.setPersonFirstName("James");
person.setPersonLastName("Bond");
person.setAge(42);
repository.save(person);
*/

this.a().b().c().e().f().g().z();
cassandraEntityManagerFactory.close();
}

public Main a() {
EntityManager em;
EntityTransaction tx;

em = cassandraEntityManagerFactory.createEntityManager();
JpaRepositoryFactory factory = new JpaRepositoryFactory(em);
tx = em.getTransaction();
try {
tx.begin();
person = new Person();
InventoryRepository repository = factory.getRepository(InventoryRepository.class);
Inventory inventory = repository.findByName("My Inventory");
if (inventory == null) {
inventory = new Inventory("My Inventory");
}
log.debug("SpringData/JPA: " + inventory.toString());
inventory.setDescription("This is my updated description.");
tx.rollback();
}
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}
return this;
}

public Main b() {
EntityManager em;
EntityTransaction tx;

// Add a person to MongoDB
em = mongoEntityManagerFactory.createEntityManager();
tx = em.getTransaction();
try {
tx.begin();
Person person = new Person();
person.setPersonFirstName("James");
person.setPersonLastName("Bond");
person.setAge(42);
personId = person.getPersonId();
em.merge(person);

List<ObjectProvider> objs = ((JPAEntityManager) em).getExecutionContext().getObjectsToBeFlushed();

@@ -90,6 +140,12 @@ public class Main {
}
em.close(); // This will detach all current managed objects
}
return this;
}

public Main c() {
EntityManager em;
EntityTransaction tx;

// Persistence of a Product and a Book.
em = cassandraEntityManagerFactory.createEntityManager();

@@ -118,7 +174,6 @@ public class Main {
catch (Exception e) {
log.error(">> Exception persisting data", e);
System.err.println("Error persisting data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();

@@ -126,7 +181,13 @@ public class Main {
em.close();
}
cassandraEntityManagerFactory.getCache().evictAll();
System.out.println("");
log.debug("");
return this;
}

public Main d() {
EntityManager em;
EntityTransaction tx;

// Perform a retrieve of the Inventory and detach it (by closing the EM)
em = cassandraEntityManagerFactory.createEntityManager();

@@ -140,19 +201,18 @@ public class Main {
// Note : you could achieve the same by either
// 1). access the Inventory.products field before commit
// 2). set fetch=EAGER for the Inventory.products field
System.out.println("Executing find() on Inventory");
log.debug("Executing find() on Inventory");
EntityGraph allGraph = em.getEntityGraph("allProps");
Map hints = new HashMap();
hints.put("javax.persistence.loadgraph", allGraph);
hints.put("javax.persistence.loadgraph", allGraph); // <- not yet supported, ignore this
inv = em.find(Inventory.class, "My Inventory", hints);
System.out.println("Retrieved Inventory as " + inv);
log.debug("Retrieved Inventory as " + inv);

tx.commit();
}
catch (Exception e) {
log.error(">> Exception performing find() on data", e);
System.err.println("Error performing find() on data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();

@@ -160,38 +220,54 @@ public class Main {
em.close(); // This will detach all current managed objects
}
for (Product prod : inv.getProducts()) {
System.out.println(">> After Detach : Inventory has a product=" + prod);
log.debug(">> After Detach : Inventory has a product=" + prod);
}
System.out.println("");
log.debug("");
return this;
}

public Main e() {
EntityManager em;
EntityTransaction tx;

// Update a person to MongoDB
em = mongoEntityManagerFactory.createEntityManager();
tx = em.getTransaction();
try {
tx.begin();
person = em.find(Person.class, person.getPersonId());
if (personId == null) {
tx.rollback();
} else {
Person person = em.find(Person.class, personId);
person.setPersonLastName("Blunder");
person.setAge(43);
tx.commit();
}
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close(); // This will detach all current managed objects
}
return this;
}

public Main f() {
EntityManager em;
EntityTransaction tx;

// Perform some query operations
em = cassandraEntityManagerFactory.createEntityManager();
tx = em.getTransaction();
try {
tx.begin();
System.out.println("Executing Query for Products with price below 150.00");
log.debug("Executing Query for Products with price below 150.00");
Query q = em.createQuery("SELECT p FROM Product p WHERE p.price < 150.00 ORDER BY p.price");
List results = q.getResultList();
Iterator iter = results.iterator();
while (iter.hasNext()) {
Object obj = iter.next();
System.out.println("> " + obj);
log.debug("> " + obj);
// Give an example of an update
if (obj instanceof Book) {
Book b = (Book) obj;

@@ -202,45 +278,27 @@ public class Main {
m.setDescription(m.getDescription() + " SPECIAL");
}
}

tx.commit();
}
catch (Exception e) {
log.error(">> Exception querying data", e);
System.err.println("Error querying data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}
System.out.println("");

em = cassandraEntityManagerFactory.createEntityManager();
factory = new JpaRepositoryFactory(em);
tx = em.getTransaction();
try {
tx.begin();
InventoryRepository repository = factory.getRepository(InventoryRepository.class);
Inventory inventory = repository.findByName("My Inventory");
System.out.println("SpringData/JPA: " + inventory.toString());
inventory.setDescription("This is my updated description.");
tx.rollback();
}
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
log.debug("");
return this;
}

public Main g() {
EntityManager em;
EntityTransaction tx;

em = cassandraEntityManagerFactory.createEntityManager();
factory = new JpaRepositoryFactory(em);
JpaRepositoryFactory factory = new JpaRepositoryFactory(em);
tx = em.getTransaction();
try {
tx.begin();

@@ -253,14 +311,18 @@ public class Main {
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}
return this;
}

public Main z() {
EntityManager em;
EntityTransaction tx;

// Clean out the database
cassandraEntityManagerFactory.getCache().evictAll();

@@ -269,17 +331,17 @@ public class Main {
try {
tx.begin();

System.out.println("Deleting all products from persistence");
inv = (Inventory) em.find(Inventory.class, "My Inventory");
log.debug("Deleting all products from persistence");
Inventory inv = (Inventory) em.find(Inventory.class, "My Inventory");

System.out.println("Clearing out Inventory");
log.debug("Clearing out Inventory");
inv.clearProducts();
em.flush();

System.out.println("Deleting Inventory");
log.debug("Deleting Inventory");
em.remove(inv);

System.out.println("Deleting all products from persistence");
log.debug("Deleting all products from persistence");
Query q = em.createQuery("SELECT p FROM Product p");
List<Product> products = q.getResultList();
int numDeleted = 0;

@@ -287,23 +349,19 @@ public class Main {
em.remove(prod);
numDeleted++;
}
System.out.println("Deleted " + numDeleted + " products");
log.debug("Deleted " + numDeleted + " products");

tx.commit();
}
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}

System.out.println("");
System.out.println("End of Tutorial");
cassandraEntityManagerFactory.close();
return this;
}
}

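The TODO list in Main above mentions wiring a default consistency level through QueryOptions. The sketch below is a minimal, standalone illustration of the plain DataStax driver call that TODO refers to; it is not wired into this commit's Spring configuration, and the contact point and keyspace name are placeholders.

    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.ConsistencyLevel;
    import com.datastax.driver.core.QueryOptions;
    import com.datastax.driver.core.Session;

    public class QuorumClusterExample {
        public static void main(String[] args) {
            // every statement run through this Cluster defaults to LOCAL_QUORUM
            Cluster cluster = Cluster.builder()
                    .addContactPoint("127.0.0.1") // placeholder contact point
                    .withQueryOptions(new QueryOptions().setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM))
                    .build();
            Session session = cluster.connect(); // or cluster.connect("kc") to bind a keyspace
            session.close();
            cluster.close();
        }
    }
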
@@ -1,34 +1,44 @@
package com.example.crud.entities;

import com.google.common.collect.ImmutableSet;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.datanucleus.api.jpa.annotations.DatastoreId;

import javax.persistence.*;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;

/**
 * Definition of an Inventory of products.
 */
@Data @Entity
@ToString
@DatastoreId
@NamedEntityGraph(name = "allProps",
attributeNodes = { @NamedAttributeNode("name"), @NamedAttributeNode("products") })
@ToString
@EqualsAndHashCode
@Getter @Setter
@IdClass(Inventory.ID.class)
public class Inventory extends AbstractAuditableEntity<String> {

@Id
private String name=null;

@Id
private String region=null;

@Basic
private String description;

@OneToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE, CascadeType.DETACH }, fetch = FetchType.EAGER)
private Set<Product> products = new HashSet<Product>();

public Inventory() {
}

@OneToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE, CascadeType.DETACH }, fetch = FetchType.EAGER)
private Set<Product> products = new HashSet<Product>();

public Inventory(String name) {
this.name = name;
}

@@ -37,7 +47,7 @@ public class Inventory extends AbstractAuditableEntity<String> {
products.add(product);
}

public Iterable<Product> getProducts() {
public ImmutableSet<Product> getProducts() {
return ImmutableSet.copyOf(products);
}

@@ -48,4 +58,9 @@ public class Inventory extends AbstractAuditableEntity<String> {
public void setDescription(String description) {
this.description = description;
}

class ID implements Serializable {
String name;
String region;
}
}

@@ -48,8 +48,10 @@ public class Person extends AbstractAuditableEntity {
* @Temporal(TemporalType.DATE) private Date updated;
*/

/*
@OneToMany(mappedBy = "seller")
private List<Product> products;
*/

public Person() {
}

@@ -40,9 +40,9 @@ public class Product extends AbstractAuditableEntity {

/**
* Seller of this product.
*/
@ManyToOne(optional = false)
private Person seller;
*/

/**
* Default constructor.

@@ -7,13 +7,9 @@ import ch.qos.logback.core.joran.spi.JoranException;
import com.codahale.metrics.*;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.codahale.metrics.jvm.*;
import com.codahale.metrics.logback.InstrumentedAppender;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import org.datanucleus.store.StoreManager;
import org.datanucleus.store.connection.ManagedConnection;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;

@@ -21,7 +17,6 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
import java.lang.management.ManagementFactory;
import java.util.concurrent.TimeUnit;

@@ -37,7 +32,7 @@ public class MetricsConfiguration extends MetricsConfigurerAdapter {

private final Logger log = (Logger)LoggerFactory.getLogger(getClass().getName());

private MetricRegistry metricRegistry = new MetricRegistry();
private MetricRegistry metricRegistry = SharedMetricRegistries.getOrCreate("dunno");

private HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();

@@ -1,5 +1,6 @@
package com.example.crud.repositories;

import com.datastax.driver.mapping.annotations.QueryParameters;
import com.example.crud.entities.Inventory;
import org.datanucleus.api.jpa.annotations.ReadOnly;
import org.springframework.cache.annotation.CacheEvict;

@@ -10,7 +11,6 @@ import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional;

import java.beans.Transient;
import java.util.List;

//@Metrics(registry = "${this.registry}")

@@ -19,7 +19,9 @@ public interface InventoryRepository extends JpaRepository<Inventory, String>, J
//@Metered(name = "${this.id}")
@Transactional
@Cacheable(value = "inventory", key = "#name")
Inventory findByName(String name);
@QueryParameters(consistency="QUORUM")
//Query(value="select * from inventory where firstName = :name", nativeQuery=true)
Inventory findByName(@Param("name") String name);

@ReadOnly
@Query(value = "select * from inventory where product_id_eid contains :productId allow filtering",

@@ -46,7 +46,6 @@
<basic name="description">
    <column length="255"/>
</basic>
<one-to-many name="seller"/>
</attributes>
</entity>

@@ -14,6 +14,8 @@
       timeToLiveSeconds="600"
       overflowToDisk="true"
       transactionalMode="off">
    <!-- cacheDecoratorFactory class="com.codahale.metrics.ehcache.InstrumentedCacheDecoratorFactory"
         properties="metric-registry-name=crud"/ -->
    <cacheEventListenerFactory class="net.sf.ehcache.distribution.jgroups.JGroupsCacheReplicatorFactory"
                               properties="replicateAsynchronously=true,replicatePuts=true,replicateUpdates=true,replicateUpdatesViaCopy=false,replicateRemovals=true"/>
</cache>

33
src/main/resources/jgroups-l2-cache-tcp-aws-native-s3.xml
Normal file
33
src/main/resources/jgroups-l2-cache-tcp-aws-native-s3.xml
Normal file
|
@ -0,0 +1,33 @@
<!--
  https://github.com/jgroups-extras/native-s3-ping
  https://github.com/belaban/jgroups-docker
-->
<config>
    <TCP
        external_addr="${JGROUPS_EXTERNAL_ADDR:match-interface:eth0}"
        bind_addr="site_local,match-interface:eth0"
        bind_port="${TCP_PORT:7800}"
    />
    <!--
      Uses an S3 bucket to discover members in the cluster.
      - If "mybucket" doesn't exist, it will be created (requires permissions)
    -->
    <org.jgroups.aws.s3.NATIVE_S3_PING
        region_name="${S3_REGION:us-east-1}"
        bucket_name="${S3_BUCKET:mybucket}"
    />
    <MERGE3 max_interval="30000" min_interval="10000"/>
    <FD_SOCK external_addr="${JGROUPS_EXTERNAL_ADDR}"
             start_port="${FD_SOCK_PORT:9000}"/>
    <FD_ALL timeout="10000" interval="3000"/>
    <pbcast.NAKACK2/>
    <UNICAST3/>
    <pbcast.STABLE desired_avg_gossip="50000"
                   max_bytes="8m"/>
    <pbcast.GMS print_local_addr="true" join_timeout="3000"
                view_bundling="true"/>
    <UFC max_credits="2M" min_threshold="0.4"/>
    <MFC max_credits="2M" min_threshold="0.4"/>
    <FRAG2 frag_size="60K" />
</config>
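The file name suggests this stack backs the Ehcache L2-cache replication configured above; it can also be exercised on its own to confirm that S3 discovery works. A minimal JGroups sketch, assuming the XML above is on the classpath and the native-s3-ping protocol jar is available (the cluster name is made up):

import org.jgroups.JChannel;
import org.jgroups.Message;
import org.jgroups.ReceiverAdapter;

// Quick connectivity check for the new protocol stack; not part of this commit.
public class ClusterSmokeTest {

    public static void main(String[] args) throws Exception {
        // Builds the protocol stack from the XML added in this commit.
        JChannel channel = new JChannel("jgroups-l2-cache-tcp-aws-native-s3.xml");
        channel.setReceiver(new ReceiverAdapter() {
            @Override
            public void receive(Message msg) {
                System.out.println("received: " + msg.getObject());
            }
        });
        channel.connect("l2-cache-smoke-test");
        channel.send(null, "hello from " + channel.getAddress());
        Thread.sleep(5000);
        channel.close();
    }
}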
@ -1,74 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
    <!-- ====================================================================== -->
    <!-- Changing the pattern requires a change in our logstash parsing module -->
    <!-- ====================================================================== -->
    <property name="java-pattern" value="%d{ISO8601, UTC} %-5p [%.15t] %c{1}: %m %X{OBJECT_ID}%X{CLIENT_IP}%X{ELEMENT_ID}%X{USER_ID}%X{CONNECTION_ID}%X{REQUEST_ID}%X{CLIENT_ID}%n"/>
    <property name="cpp-pattern" value="%d{ISO8601, UTC} %-5p %c{1}: %m %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="timer-pattern" value="%d{ISO8601, UTC} %-5p %c: %m \\(%F, line %L\\) %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="tree-pattern" value="%d{ISO8601, UTC} %-5p %c: %m%n"/>

    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${java-pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>TRACE</level>
        </filter>
    </appender>

    <appender name="consoleasync" class="ch.qos.logback.classic.AsyncAppender">
        <queueSize>10000</queueSize>
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="console"/>
    </appender>

    <category name="com.example.crud" level="TRACE" additivity="true"/>

    <logger name="com.mchange.v2.resourcepool" level="INFO"/>

    <logger name="com.mchange.v2.c3p0" level="INFO"/>

    <logger name="org.logicalcobwebs.proxool" level="INFO"/>

    <logger name="DataNucleus" level="TRACE"/>

    <logger name="org.springframework" level="TRACE"/>

    <logger name="org.springframework.cache" level="Trace"/>

    <logger name="org.springdata.cassandra" level="TRACE"/>

    <logger name="com.datastax.driver.core.Cluster" level="INFO"/>

    <logger name="com.datastax.driver.core.Session" level="INFO"/>

    <logger name="com.datastax.driver.core.RequestHandler" level="TRACE"/>

    <logger name="com.datastax.driver.core.Connection" level="INFO"/>

    <logger name="com.datastax.driver.core.Message" level="INFO"/>

    <logger name="com.datastax.driver.core.QueryLogger.SLOW" level="INFO"/>

    <logger name="com.datastax.driver.core.QueryLogger.NORMAL" level="INFO"/>

    <logger name="com.datastax.driver.core.QueryLogger.FAST" level="INFO"/>

    <logger name="com.datastax.driver.core.QueryLogger.ERROR" level="INFO"/>

    <logger name="net.sf.ehcache" level="TRACE"/>

    <logger name="org.jgroups" level="TRACE"/>

    <logger name="org.mongodb.driver" level="TRACE"/>

    <logger name="net.spy.memcached" level="TRACE"/>

    <!-- Root logger -->
    <root>
        <level value="info" />
        <appender-ref ref="consoleasync" />
    </root>

</configuration>
1
src/main/resources/logback.xml
Symbolic link

@ -0,0 +1 @@
logback.xml_TRACE
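With logback.xml now a symlink to the TRACE variant, switching verbosity means repointing the link or overriding the configuration at startup. One way to pick the WARN variant (added below) without touching the symlink is logback's standard logback.configurationFile property, set before the first logger is created; the launcher class here is hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical launcher showing how to select an alternate logback config.
public class QuietLauncher {

    public static void main(String[] args) {
        // Must be set before LoggerFactory initializes logback.
        System.setProperty("logback.configurationFile", "logback.xml_WARN");

        Logger log = LoggerFactory.getLogger(QuietLauncher.class);
        log.warn("logging initialized from logback.xml_WARN");
    }
}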
74
src/main/resources/logback.xml_TRACE
Normal file
@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
    <!-- ====================================================================== -->
    <!-- Changing the pattern requires a change in our logstash parsing module -->
    <!-- ====================================================================== -->
    <property name="java-pattern" value="%d{ISO8601, UTC} %-5p [%.15t] %c{1}: %m %X{OBJECT_ID}%X{CLIENT_IP}%X{ELEMENT_ID}%X{USER_ID}%X{CONNECTION_ID}%X{REQUEST_ID}%X{CLIENT_ID}%n"/>
    <property name="cpp-pattern" value="%d{ISO8601, UTC} %-5p %c{1}: %m %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="timer-pattern" value="%d{ISO8601, UTC} %-5p %c: %m \\(%F, line %L\\) %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="tree-pattern" value="%d{ISO8601, UTC} %-5p %c: %m%n"/>

    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${java-pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>TRACE</level>
        </filter>
    </appender>

    <appender name="consoleasync" class="ch.qos.logback.classic.AsyncAppender">
        <queueSize>10000</queueSize>
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="console"/>
    </appender>

    <category name="com.example.crud" level="TRACE" additivity="true"/>

    <logger name="com.mchange.v2.resourcepool" level="TRACE"/>

    <logger name="com.mchange.v2.c3p0" level="TRACE"/>

    <logger name="org.logicalcobwebs.proxool" level="TRACE"/>

    <logger name="DataNucleus" level="TRACE"/>

    <logger name="org.springframework" level="TRACE"/>

    <logger name="org.springframework.cache" level="Trace"/>

    <logger name="org.springdata.cassandra" level="TRACE"/>

    <logger name="com.datastax.driver.core.Cluster" level="TRACE"/>

    <logger name="com.datastax.driver.core.Session" level="TRACE"/>

    <logger name="com.datastax.driver.core.RequestHandler" level="TRACE"/>

    <logger name="com.datastax.driver.core.Connection" level="TRACE"/>

    <logger name="com.datastax.driver.core.Message" level="TRACE"/>

    <logger name="com.datastax.driver.core.QueryLogger.SLOW" level="TRACE"/>

    <logger name="com.datastax.driver.core.QueryLogger.NORMAL" level="TRACE"/>

    <logger name="com.datastax.driver.core.QueryLogger.FAST" level="TRACE"/>

    <logger name="com.datastax.driver.core.QueryLogger.ERROR" level="TRACE"/>

    <logger name="net.sf.ehcache" level="TRACE"/>

    <logger name="org.jgroups" level="TRACE"/>

    <logger name="org.mongodb.driver" level="TRACE"/>

    <logger name="net.spy.memcached" level="TRACE"/>

    <!-- Root logger -->
    <root>
        <level value="info" />
        <appender-ref ref="consoleasync" />
    </root>

</configuration>
74
src/main/resources/logback.xml_WARN
Normal file
@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
    <!-- ====================================================================== -->
    <!-- Changing the pattern requires a change in our logstash parsing module -->
    <!-- ====================================================================== -->
    <property name="java-pattern" value="%d{ISO8601, UTC} %-5p [%.15t] %c{1}: %m %X{OBJECT_ID}%X{CLIENT_IP}%X{ELEMENT_ID}%X{USER_ID}%X{CONNECTION_ID}%X{REQUEST_ID}%X{CLIENT_ID}%n"/>
    <property name="cpp-pattern" value="%d{ISO8601, UTC} %-5p %c{1}: %m %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="timer-pattern" value="%d{ISO8601, UTC} %-5p %c: %m \\(%F, line %L\\) %X{OBJECT_ID} %X{ELEMENT_ID}%n"/>
    <property name="tree-pattern" value="%d{ISO8601, UTC} %-5p %c: %m%n"/>

    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${java-pattern}</pattern>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>TRACE</level>
        </filter>
    </appender>

    <appender name="consoleasync" class="ch.qos.logback.classic.AsyncAppender">
        <queueSize>10000</queueSize>
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="console"/>
    </appender>

    <category name="com.example.crud" additivity="true"/>

    <logger name="com.mchange.v2.resourcepool"/>

    <logger name="com.mchange.v2.c3p0"/>

    <logger name="org.logicalcobwebs.proxool"/>

    <logger name="DataNucleus"/>

    <logger name="org.springframework"/>

    <logger name="org.springframework.cache" level="Trace"/>

    <logger name="org.springdata.cassandra"/>

    <logger name="com.datastax.driver.core.Cluster"/>

    <logger name="com.datastax.driver.core.Session"/>

    <logger name="com.datastax.driver.core.RequestHandler"/>

    <logger name="com.datastax.driver.core.Connection"/>

    <logger name="com.datastax.driver.core.Message"/>

    <logger name="com.datastax.driver.core.QueryLogger.SLOW"/>

    <logger name="com.datastax.driver.core.QueryLogger.NORMAL"/>

    <logger name="com.datastax.driver.core.QueryLogger.FAST"/>

    <logger name="com.datastax.driver.core.QueryLogger.ERROR"/>

    <logger name="net.sf.ehcache"/>

    <logger name="org.jgroups"/>

    <logger name="org.mongodb.driver"/>

    <logger name="net.spy.memcached"/>

    <!-- Root logger -->
    <root>
        <level value="WARN" />
        <appender-ref ref="consoleasync" />
    </root>

</configuration>