L2 cache via EhCache working, distributed via JGroups.

This commit is contained in:
Greg Burd 2017-06-22 11:40:59 -04:00
parent 5e5c53af8a
commit 54caf644a2
25 changed files with 1192 additions and 568 deletions

View file

@ -13,16 +13,19 @@
</configuration> </configuration>
</facet> </facet>
<facet type="Spring" name="Spring"> <facet type="Spring" name="Spring">
<configuration /> <configuration>
<fileset id="fileset" name="Spring Application Context" removed="false">
<file>file://$MODULE_DIR$/src/main/java/com/example/crud/ApplicationConfig.java</file>
<file>file://$MODULE_DIR$/src/main/java/com/example/crud/CacheConfiguration.java</file>
<file>file://$MODULE_DIR$/src/main/java/com/example/crud/EhCacheConfiguration.java</file>
<file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/MetricsConfiguration.java</file>
<file>file://$MODULE_DIR$/src/main/java/com/example/crud/metrics/SpringConfiguringClass.java</file>
<file>file://$MODULE_DIR$/applicationContext.xml</file>
</fileset>
</configuration>
</facet> </facet>
<facet type="AspectJ" name="AspectJ"> <facet type="AspectJ" name="AspectJ">
<configuration> <configuration />
<option name="aspectPath">
<projectLibrary>
<option name="name" value="Maven: io.astefanutti.metrics.aspectj:metrics-aspectj:1.2.0" />
</projectLibrary>
</option>
</configuration>
</facet> </facet>
</component> </component>
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8"> <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
@ -55,7 +58,6 @@
<orderEntry type="library" name="Maven: io.netty:netty-transport:4.0.44.Final" level="project" /> <orderEntry type="library" name="Maven: io.netty:netty-transport:4.0.44.Final" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-codec:4.0.44.Final" level="project" /> <orderEntry type="library" name="Maven: io.netty:netty-codec:4.0.44.Final" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:19.0" level="project" /> <orderEntry type="library" name="Maven: com.google.guava:guava:19.0" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.1.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-ffi:2.0.7" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-ffi:2.0.7" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jffi:1.2.10" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jffi:1.2.10" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: com.github.jnr:jffi:native:1.2.10" level="project" /> <orderEntry type="library" scope="RUNTIME" name="Maven: com.github.jnr:jffi:native:1.2.10" level="project" />
@ -67,25 +69,21 @@
<orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-x86asm:1.0.2" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-posix:3.0.27" level="project" />
<orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" /> <orderEntry type="library" name="Maven: com.github.jnr:jnr-constants:0.9.0" level="project" />
<orderEntry type="library" name="Maven: org.hdrhistogram:HdrHistogram:2.1.9" level="project" />
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-cache:5.0.0-release" level="project" /> <orderEntry type="library" name="Maven: org.datanucleus:datanucleus-cache:5.0.0-release" level="project" />
<orderEntry type="library" name="Maven: org.infinispan:infinispan-embedded:9.0.2.Final" level="project" /> <orderEntry type="library" name="Maven: org.datanucleus:datanucleus-guava:5.0.1" level="project" />
<orderEntry type="library" name="Maven: org.jboss.spec.javax.transaction:jboss-transaction-api_1.1_spec:1.0.1.Final" level="project" /> <orderEntry type="library" name="Maven: org.datanucleus:datanucleus-jodatime:5.0.0-release" level="project" />
<orderEntry type="library" name="Maven: org.infinispan:infinispan-jcache:9.0.2.Final" level="project" />
<orderEntry type="library" name="Maven: org.infinispan:infinispan-core:9.0.2.Final" level="project" />
<orderEntry type="library" name="Maven: org.infinispan:infinispan-commons:9.0.2.Final" level="project" />
<orderEntry type="library" name="Maven: org.jgroups:jgroups:4.0.3.Final" level="project" />
<orderEntry type="library" name="Maven: com.github.ben-manes.caffeine:caffeine:2.4.0" level="project" />
<orderEntry type="library" name="Maven: org.jboss.marshalling:jboss-marshalling-osgi:2.0.0.Beta3" level="project" />
<orderEntry type="library" name="Maven: org.jboss.logging:jboss-logging:3.3.0.Final" level="project" />
<orderEntry type="library" name="Maven: org.infinispan:infinispan-jcache-commons:9.0.2.Final" level="project" />
<orderEntry type="library" name="Maven: javax.cache:cache-api:1.0.0" level="project" /> <orderEntry type="library" name="Maven: javax.cache:cache-api:1.0.0" level="project" />
<orderEntry type="library" name="Maven: org.ehcache:ehcache:3.3.1" level="project" /> <orderEntry type="library" name="Maven: net.sf.ehcache:ehcache-core:2.6.11" level="project" />
<orderEntry type="library" name="Maven: net.sf.ehcache:ehcache-jgroupsreplication:1.7" level="project" />
<orderEntry type="library" name="Maven: org.jgroups:jgroups:3.1.0.Final" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context:5.0.0.M5" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-context:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-aop:5.0.0.M5" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-aop:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-beans:5.0.0.M5" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-beans:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-core:5.0.0.M5" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-core:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" /> <orderEntry type="library" name="Maven: commons-logging:commons-logging:1.2" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-expression:5.0.0.M5" level="project" /> <orderEntry type="library" name="Maven: org.springframework:spring-expression:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework:spring-context-support:5.0.0.M5" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-jpa:2.0.0.M4" level="project" /> <orderEntry type="library" name="Maven: org.springframework.data:spring-data-jpa:2.0.0.M4" level="project" />
<orderEntry type="library" name="Maven: org.springframework.data:spring-data-commons:2.0.0.M4" level="project" /> <orderEntry type="library" name="Maven: org.springframework.data:spring-data-commons:2.0.0.M4" level="project" />
<orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" /> <orderEntry type="library" name="Maven: org.aspectj:aspectjrt:1.8.10" level="project" />
@ -102,10 +100,22 @@
<orderEntry type="library" name="Maven: joda-time:joda-time:2.9.9" level="project" /> <orderEntry type="library" name="Maven: joda-time:joda-time:2.9.9" level="project" />
<orderEntry type="library" name="Maven: org.projectlombok:lombok:1.16.16" level="project" /> <orderEntry type="library" name="Maven: org.projectlombok:lombok:1.16.16" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" /> <orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" name="Maven: io.astefanutti.metrics.aspectj:metrics-aspectj:1.2.0" level="project" /> <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-annotation:3.2.2" level="project" /> <orderEntry type="library" name="Maven: com.codahale.metrics:metrics-annotation:3.2.2" level="project" />
<orderEntry type="library" name="Maven: org.glassfish.web:javax.el:2.2.6" level="project" /> <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-logback:3.2.2" level="project" />
<orderEntry type="library" name="Maven: javax.el:javax.el-api:2.2.5" level="project" /> <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-jvm:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-healthchecks:3.2.2" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-graphite:3.2.2" level="project" />
<orderEntry type="library" name="Maven: com.ryantenney.metrics:metrics-spring:3.1.3" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-annotation:3.1.2" level="project" />
<orderEntry type="library" name="Maven: io.riemann:metrics3-riemann-reporter:0.4.5" level="project" />
<orderEntry type="library" name="Maven: com.codahale.metrics:metrics-core:3.0.1" level="project" />
<orderEntry type="library" name="Maven: io.riemann:riemann-java-client:0.4.5" level="project" />
<orderEntry type="library" name="Maven: com.google.protobuf:protobuf-java:2.6.1" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty:3.6.1.Final" level="project" />
<orderEntry type="library" name="Maven: defunkt:logback-riemann-appender:0.4.0" level="project" />
<orderEntry type="library" name="Maven: com.aphyr:riemann-java-client:0.4.1" level="project" />
<orderEntry type="library" name="Maven: javax.inject:javax.inject:1" level="project" />
</component> </component>
</module> </module>

pom.xml
View file

@ -16,6 +16,7 @@
<maven.compiler.target>${java.version}</maven.compiler.target> <maven.compiler.target>${java.version}</maven.compiler.target>
<org.datanucleus.version>[5.1.0-m3, 5.9)</org.datanucleus.version> <org.datanucleus.version>[5.1.0-m3, 5.9)</org.datanucleus.version>
<aspectj.version>1.8.10</aspectj.version> <aspectj.version>1.8.10</aspectj.version>
<metrics.version>3.2.2</metrics.version>
<spring.version>5.0.0.M5</spring.version> <spring.version>5.0.0.M5</spring.version>
<spring-data.version>Kay-M4</spring-data.version> <spring-data.version>Kay-M4</spring-data.version>
</properties> </properties>
@ -29,6 +30,14 @@
<enabled>false</enabled> <enabled>false</enabled>
</snapshots> </snapshots>
</repository> </repository>
<repository>
<id>clojars.org</id>
<url>http://clojars.org/repo</url>
</repository>
<repository>
<id>maven.apache.org</id>
<url>http://repo.maven.apache.org/maven2/</url>
</repository>
</repositories> </repositories>
<dependencyManagement> <dependencyManagement>
@ -116,13 +125,26 @@
<version>3.2.0</version> <version>3.2.0</version>
</dependency> </dependency>
<dependency>
<groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId>
<version>[2.1.8,)</version>
</dependency>
<dependency> <dependency>
<groupId>org.datanucleus</groupId> <groupId>org.datanucleus</groupId>
<artifactId>datanucleus-cache</artifactId> <artifactId>datanucleus-cache</artifactId>
<version>5.0.0-release</version> <version>5.0.0-release</version>
<!--
<exclusions>
<exclusion>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
</exclusion>
</exclusions>
-->
</dependency> </dependency>
<!--
<dependency> <dependency>
<groupId>org.datanucleus</groupId> <groupId>org.datanucleus</groupId>
<artifactId>datanucleus-guava</artifactId> <artifactId>datanucleus-guava</artifactId>
@ -135,6 +157,7 @@
<version>5.0.0-release</version> <version>5.0.0-release</version>
</dependency> </dependency>
<!--
<dependency> <dependency>
<groupId>org.datanucleus</groupId> <groupId>org.datanucleus</groupId>
<artifactId>datanucleus-java8</artifactId> <artifactId>datanucleus-java8</artifactId>
@ -142,63 +165,38 @@
</dependency> </dependency>
--> -->
<dependency>
<groupId>org.infinispan</groupId>
<artifactId>infinispan-embedded</artifactId>
<version>9.0.2.Final</version>
<exclusions>
<exclusion>
<groupId>org.jboss.slf4j</groupId>
<artifactId>slf4j-jboss-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.infinispan</groupId>
<artifactId>infinispan-jcache</artifactId>
<version>9.0.2.Final</version>
</dependency>
<dependency> <dependency>
<groupId>javax.cache</groupId> <groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId> <artifactId>cache-api</artifactId>
<version>1.0.0</version> <version>1.0.0</version>
</dependency> </dependency>
<!--
<dependency> <dependency>
<groupId>org.ehcache</groupId> <groupId>org.ehcache</groupId>
<artifactId>ehcache</artifactId> <artifactId>ehcache</artifactId>
<version>3.3.1</version> <version>3.3.1</version>
</dependency> </dependency>
-->
<!-- dependency> <dependency>
<groupId>org.infinispan</groupId> <groupId>net.sf.ehcache</groupId>
<artifactId>infinispan-spring4-embedded</artifactId> <artifactId>ehcache-core</artifactId>
<version>9.0.2.Final</version> <version>2.6.11</version>
<exclusions> </dependency>
<exclusion>
<groupId>org.jboss.slf4j</groupId> <dependency>
<artifactId>slf4j-jboss-logging</artifactId> <groupId>net.sf.ehcache</groupId>
</exclusion> <artifactId>ehcache</artifactId>
<exclusion> <version>2.10.4</version>
<groupId>org.slf4j</groupId> <type>pom</type>
<artifactId>slf4j-log4j12</artifactId> </dependency>
</exclusion>
<exclusion> <dependency>
<groupId>log4j</groupId> <groupId>net.sf.ehcache</groupId>
<artifactId>log4j</artifactId> <artifactId>ehcache-jgroupsreplication</artifactId>
</exclusion> <version>1.7</version>
</exclusions> </dependency>
</dependency -->
<!-- depending on a use case, one should use Spring Context or Spring Boot jars --> <!-- depending on a use case, one should use Spring Context or Spring Boot jars -->
<dependency> <dependency>
@ -207,6 +205,12 @@
<version>${spring.version}</version> <version>${spring.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency> <dependency>
<groupId>org.springframework.data</groupId> <groupId>org.springframework.data</groupId>
<artifactId>spring-data-jpa</artifactId> <artifactId>spring-data-jpa</artifactId>
@ -264,6 +268,67 @@
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<version>${metrics.version}</version>
</dependency>
<!--
<dependency>
<groupId>com.codahale.metrics</groupId>
<artifactId>metrics-annotation</artifactId>
<version>${metrics.version}</version>
</dependency>
-->
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-logback</artifactId>
<version>3.2.2</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-jvm</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-healthchecks</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-graphite</artifactId>
<version>${metrics.version}</version>
</dependency>
<dependency>
<groupId>com.ryantenney.metrics</groupId>
<artifactId>metrics-spring</artifactId>
<version>3.1.3</version>
</dependency>
<dependency>
<groupId>io.riemann</groupId>
<artifactId>metrics3-riemann-reporter</artifactId>
<version>0.4.5</version>
</dependency>
<dependency>
<groupId>defunkt</groupId>
<artifactId>logback-riemann-appender</artifactId>
<version>0.4.0</version>
</dependency>
<!--
<dependency> <dependency>
<groupId>io.astefanutti.metrics.aspectj</groupId> <groupId>io.astefanutti.metrics.aspectj</groupId>
<artifactId>metrics-aspectj</artifactId> <artifactId>metrics-aspectj</artifactId>
@ -275,6 +340,13 @@
<artifactId>javax.el</artifactId> <artifactId>javax.el</artifactId>
<version>2.2.6</version> <version>2.2.6</version>
</dependency> </dependency>
-->
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
</dependency>
</dependencies> </dependencies>
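For context, not part of this commit: the side-by-side rendering of the dependencies hunk above interleaves a commented-out org.infinispan:infinispan-spring4-embedded block (removed) with the new net.sf.ehcache entries (added): ehcache-core 2.6.11, the ehcache 2.10.4 pom, and ehcache-jgroupsreplication 1.7. With datanucleus-cache and ehcache-core on the classpath, the DataNucleus L2 cache is normally switched on per persistence unit through persistence.xml properties. A minimal sketch, assuming the "crud" unit used in Main and an ehcache.xml at the classpath root; property names follow the datanucleus-cache plugin conventions and should be verified against the DataNucleus release in use:

    <!-- hypothetical persistence.xml excerpt, not part of this diff -->
    <property name="datanucleus.cache.level2.type" value="ehcache"/>
    <property name="datanucleus.cache.level2.cacheName" value="crud"/>
    <property name="datanucleus.cache.level2.configurationFile" value="/ehcache.xml"/>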
@ -306,12 +378,12 @@
<artifactId>aspectj-maven-plugin</artifactId> <artifactId>aspectj-maven-plugin</artifactId>
<version>1.10</version> <version>1.10</version>
<configuration> <configuration>
<aspectLibraries> <!-- aspectLibraries>
<aspectLibrary> <aspectLibrary>
<groupId>io.astefanutti.metrics.aspectj</groupId> <groupId>io.astefanutti.metrics.aspectj</groupId>
<artifactId>metrics-aspectj</artifactId> <artifactId>metrics-aspectj</artifactId>
</aspectLibrary> </aspectLibrary>
</aspectLibraries> </aspectLibraries -->
</configuration> </configuration>
<executions> <executions>
<execution> <execution>

View file

@ -1,15 +1,14 @@
package com.example.crud; package com.example.crud;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.example.crud.entities.AbstractAuditableEntity; import com.example.crud.entities.AbstractAuditableEntity;
import com.example.crud.entities.AbstractEntity; import com.example.crud.entities.AbstractEntity;
import com.example.crud.entities.Product; import com.example.crud.entities.Product;
import org.datanucleus.ExecutionContext;
import org.datanucleus.enhancer.DataNucleusEnhancer; import org.datanucleus.enhancer.DataNucleusEnhancer;
import org.infinispan.configuration.cache.CacheMode; import org.datanucleus.store.StoreManager;
import org.infinispan.configuration.cache.ConfigurationBuilder; import org.datanucleus.store.connection.ManagedConnection;
import org.infinispan.configuration.global.GlobalConfigurationBuilder;
import org.infinispan.manager.DefaultCacheManager;
import org.infinispan.manager.EmbeddedCacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.context.annotation.EnableAspectJAutoProxy;
@ -22,18 +21,17 @@ import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement; import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory; import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence; import javax.persistence.Persistence;
import java.net.UnknownHostException;
@Configuration @Configuration
@EnableJpaRepositories @EnableJpaRepositories
@EnableJpaAuditing @EnableJpaAuditing
@EnableScheduling @EnableScheduling
@EnableAspectJAutoProxy @EnableAspectJAutoProxy
@EnableCaching
@EnableTransactionManagement @EnableTransactionManagement
class ApplicationConfig { class ApplicationConfiguration {
@PostConstruct @PostConstruct
private void enhanceModelObjectBytecode() { private void enhanceModelObjectBytecode() {
@ -60,43 +58,30 @@ class ApplicationConfig {
return txManager; return txManager;
} }
@Bean // Auditing
public EmbeddedCacheManager cacheManager() {
return infinispanEmbeddedDistributedCacheManager();
}
@Bean @Bean
public AuditorAware<String> auditorAware() { public AuditorAware<String> auditorAware() {
return new UsernameAuditorAware(); return new UsernameAuditorAware();
} }
private EmbeddedCacheManager infinispanEmbeddedDistributedCacheManager() { // Cassandra
String nodeName = null; @Bean
try { public EntityManager entityManager() {
nodeName = java.net.InetAddress.getLocalHost().getHostName(); EntityManager em = entityManagerFactory().createEntityManager();
} catch (UnknownHostException e) { return em;
nodeName = "localhost"; }
}
// ConfigurationBuilder cb = new ConfigurationBuilder(); cb.addCluster("HighQCacheCluster").addClusterNode("jboss1ind1", 11222).addClusterNode("udit.local.com", 11222); RemoteCacheManager rmc = new RemoteCacheManager(cb.build());
DefaultCacheManager cacheManager = new DefaultCacheManager( @Bean
GlobalConfigurationBuilder.defaultClusteredBuilder() public Session session() {
.transport().nodeName(nodeName).addProperty("configurationFile", StoreManager storeManager = ((ExecutionContext)entityManager().getDelegate()).getNucleusContext().getStoreManager();
"jgroups-l2-cache-udp-largecluster.xml") ManagedConnection connection = storeManager.getConnection(-1);
.build(), Session session = (Session) connection.getConnection();
new ConfigurationBuilder() return session;
.clustering() }
.cacheMode(CacheMode.INVALIDATION_SYNC)
.build() @Bean
); public Cluster cluster() {
// The only way to get the "repl" cache to be exactly the same as the default cache is to not define it at all return session().getCluster();
cacheManager.defineConfiguration("dist", new ConfigurationBuilder()
.clustering()
.cacheMode(CacheMode.DIST_SYNC)
.hash().numOwners(2)
.build()
);
return cacheManager;
} }
} }
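Because the side-by-side rendering interleaves the removed Infinispan cache-manager code with the additions, here is the added right-hand column of this class restated on its own (a reconstruction from the hunk above; entityManagerFactory() is the class's existing bean outside this hunk, and the imports are the ones added at the top of the file):

    // Cassandra
    @Bean
    public EntityManager entityManager() {
        EntityManager em = entityManagerFactory().createEntityManager();
        return em;
    }

    @Bean
    public Session session() {
        // Unwrap the Datastax driver Session that DataNucleus holds for this persistence unit.
        StoreManager storeManager = ((ExecutionContext) entityManager().getDelegate())
                .getNucleusContext().getStoreManager();
        ManagedConnection connection = storeManager.getConnection(-1);
        Session session = (Session) connection.getConnection();
        return session;
    }

    @Bean
    public Cluster cluster() {
        return session().getCluster();
    }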

View file

@ -1,52 +0,0 @@
package com.example.crud;
import org.infinispan.notifications.Listener;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryCreated;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryModified;
import org.infinispan.notifications.cachelistener.annotation.CacheEntryRemoved;
import org.infinispan.notifications.cachelistener.annotation.TopologyChanged;
import org.infinispan.notifications.cachelistener.event.CacheEntryCreatedEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryModifiedEvent;
import org.infinispan.notifications.cachelistener.event.CacheEntryRemovedEvent;
import org.infinispan.notifications.cachelistener.event.TopologyChangedEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Listener(clustered = true)
public class CacheClusterListener {
private Logger log = LoggerFactory.getLogger(getClass().getName());
@CacheEntryCreated
public void observeAdd(CacheEntryCreatedEvent<String, String> event) {
if (event.isPre())
return;
log.info("Cache entry %s added in cache %s", event.getKey(), event.getCache());
}
@CacheEntryModified
public void observeUpdate(CacheEntryModifiedEvent<String, String> event) {
if (event.isPre())
return;
log.info("Cache entry %s = %s modified in cache %s", event.getKey(), event.getValue(), event.getCache());
}
@CacheEntryRemoved
public void observeRemove(CacheEntryRemovedEvent<String, String> event) {
if (event.isPre())
return;
log.info("Cache entry %s removed in cache %s", event.getKey(), event.getCache());
}
@TopologyChanged
public void observeTopologyChange(TopologyChangedEvent<String, String> event) {
if (event.isPre())
return;
log.info("Cache %s topology changed, new membership is %s", event.getCache().getName(), event.getConsistentHashAtEnd().getMembers());
}
}

View file

@ -0,0 +1,135 @@
package com.example.crud;
import com.google.common.collect.Lists;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.CachingConfigurer;
import org.springframework.cache.annotation.CachingConfigurerSupport;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCache;
import org.springframework.cache.interceptor.CacheErrorHandler;
import org.springframework.cache.interceptor.CacheResolver;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.cache.support.CompositeCacheManager;
import org.springframework.cache.support.SimpleCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import java.lang.reflect.Method;
import java.util.List;
import java.util.stream.Collectors;
@Configuration
//@ComponentScan("com.example.crud")
//@PropertySource("application.properties")
@EnableCaching
public class CacheConfiguration extends CachingConfigurerSupport {
//private final CacheProperties cacheProperties;
//@Autowired
//public CacheConfiguration(CacheProperties cacheProperties) {
// this.cacheProperties = cacheProperties;
// }
@Qualifier("ehCacheCacheManager")
@Autowired(required = false)
private CacheManager ehCacheCacheManager;
/*
@Qualifier("redisCacheManager")
@Autowired(required = false)
private CacheManager redisCacheManager;
*/
@Bean
@Override
public CacheManager cacheManager() {
// if (cacheProperties.isEnabled()) {
List<CacheManager> cacheManagers = Lists.newArrayList();
if (this.ehCacheCacheManager != null) {
cacheManagers.add(this.ehCacheCacheManager);
}
/*
if (this.redisCacheManager != null) {
cacheManagers.add(this.redisCacheManager);
}
*/
CompositeCacheManager cacheManager = new CompositeCacheManager();
cacheManager.setCacheManagers(cacheManagers);
cacheManager.setFallbackToNoOpCache(false);
return cacheManager;
/*
} else {
SimpleCacheManager cacheManager = new SimpleCacheManager();
List<ConcurrentMapCache> caches = cacheProperties.getCacheNameList()
.stream()
.map(cacheName -> new ConcurrentMapCache(cacheName))
.collect(Collectors.toList());
cacheManager.setCaches(caches);
return cacheManager;
}
*/
}
@Bean
@Override
public CacheResolver cacheResolver() {
return null;
}
@Bean
@Override
public KeyGenerator keyGenerator() {
/* Simplistic KeyGenerator example:
return new KeyGenerator() {
@Override
public Object generate(Object o, Method method, Object... params) {
StringBuilder sb = new StringBuilder();
sb.append(o.getClass().getName());
sb.append(method.getName());
for (Object param : params) {
sb.append(param.toString());
}
return sb.toString();
}
};
*/
// Same logic as the DefaultKeyGenerator
return new KeyGenerator() {
public static final int NO_PARAM_KEY = 0;
public static final int NULL_PARAM_KEY = 53;
public Object generate(Object target, Method method, Object... params) {
if (params.length == 1) {
return (params[0] == null ? NULL_PARAM_KEY : params[0]);
}
if (params.length == 0) {
return NO_PARAM_KEY;
}
int hashCode = 17;
for (Object object : params) {
hashCode = 31 * hashCode + (object == null ? NULL_PARAM_KEY : object.hashCode());
}
return Integer.valueOf(hashCode);
}
};
}
@Bean
@Override
public CacheErrorHandler errorHandler () {
return null;
}
}
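For illustration, not part of the diff: with @EnableCaching and the CompositeCacheManager above, caching is applied declaratively on beans. A minimal sketch; the ProductCatalog service and the "products" cache name are hypothetical and would need a matching cache defined in ehcache.xml:

    import com.example.crud.entities.Product;
    import org.springframework.cache.annotation.CacheEvict;
    import org.springframework.cache.annotation.Cacheable;
    import org.springframework.stereotype.Service;

    @Service
    public class ProductCatalog {

        // Result is stored in the "products" cache managed by the composite manager above;
        // with a single parameter, keyGenerator() uses the name itself as the cache key.
        @Cacheable("products")
        public Product findByName(String name) {
            return expensiveLookup(name);
        }

        // Drop the cached entry when the product changes.
        @CacheEvict(value = "products", key = "#product.name")
        public void update(Product product) {
            // ... persist the change ...
        }

        private Product expensiveLookup(String name) {
            // placeholder for a slow repository or database call
            return new Product(name, "looked up the slow way", 0.0);
        }
    }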

View file

@ -0,0 +1,26 @@
package com.example.crud;
import org.springframework.cache.ehcache.EhCacheManagerFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.cache.ehcache.EhCacheCacheManager;
@Configuration
public class EhCacheConfiguration {
@Bean
public EhCacheCacheManager ehCacheCacheManager() {
return new EhCacheCacheManager(ehCacheManagerFactoryBean().getObject());
}
@Bean
public EhCacheManagerFactoryBean ehCacheManagerFactoryBean() {
EhCacheManagerFactoryBean cacheManagerFactoryBean = new EhCacheManagerFactoryBean();
cacheManagerFactoryBean.setConfigLocation(new ClassPathResource("ehcache.xml"));
cacheManagerFactoryBean.setShared(true);
return cacheManagerFactoryBean;
}
}
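The ehcache.xml loaded by the factory bean above is not part of this diff. As a minimal sketch of how ehcache-core 2.x caches are typically distributed over JGroups with the ehcache-jgroupsreplication module added in pom.xml: the two class names come from that module, while the stack file name, sizes, and TTLs are placeholders to adapt (older module versions use connect="..." with an inline protocol string instead of file=):

    <ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:noNamespaceSchemaLocation="ehcache.xsd" updateCheck="false">

        <!-- One peer provider per CacheManager: joins the JGroups cluster described
             by the referenced protocol-stack file. -->
        <cacheManagerPeerProviderFactory
            class="net.sf.ehcache.distribution.jgroups.JGroupsCacheManagerPeerProviderFactory"
            properties="file=jgroups-udp.xml"/>

        <defaultCache maxEntriesLocalHeap="10000" eternal="false"
                      timeToIdleSeconds="300" timeToLiveSeconds="600">
            <!-- Push puts/updates/removals to the other members; replicateUpdatesViaCopy=false
                 turns updates into invalidations on the peers. -->
            <cacheEventListenerFactory
                class="net.sf.ehcache.distribution.jgroups.JGroupsCacheReplicatorFactory"
                properties="replicateAsynchronously=true, replicatePuts=true,
                            replicateUpdates=true, replicateUpdatesViaCopy=false,
                            replicateRemovals=true"/>
        </defaultCache>
    </ehcache>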

View file

@ -1,236 +0,0 @@
package com.example.crud;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jgroups.*;
import org.jgroups.blocks.locking.LockService;
public class JGroups {
private Logger log = LoggerFactory.getLogger(getClass().getName());
/* this variable indicates whether I have become the master or I'm just a client*/
public volatile AtomicBoolean becomeMaster = new AtomicBoolean(false);
/* The address of the server if we are a client or of ourself if we are
* server */
public String serverAddress;
/* A channel on which to acquire a lock, so that only one can become server */
private JChannel lockChannel;
/* A shared channel for communication between client and master */
private JChannel communicationChannel;
private LockService lockService;
/* A thread which tries to acquire a lock */
private Thread acquiringThread;
/* A thread which listens for the server ip which may change */
private Thread listeningThread;
/* A thread which lists the status and initializes the acquiring thread*/
private Thread statusThread;
private String name;
/* If we pass from being a client to being a server we must stop the listening
* thread however we cannot call listeningThread.stop() but instead we change
* the stopListening boolean to true */
private boolean stopListening = false;
/* This lock communicates I have finally become either master or client so
* the serverAddress and becomeMaster variables are correctly set */
public final Object finishedLock = new Object();
public static void main(String[] args) throws Exception {
//System.setProperty("jgroups.udp.mcast_addr", "127.0.0.1");
Thread.currentThread().setName("MyMainThread");
Random rand = new Random();
JGroups master = new JGroups("Node" + rand.nextInt(10));
master.lockChannel = new JChannel(JGroups.class.getClassLoader().getResource(
"jgroups-l2-cache-udp-largecluster.xml"));
master.lockChannel.connect("lock-channel");
master.communicationChannel = new JChannel(
JGroups.class.getClassLoader().getResource("jgroups-l2-cache-udp-largecluster.xml"));
master.communicationChannel.connect("communication-channel");
master.lockService = new LockService(master.lockChannel);
master.startStatusPrinterThread();
}
public JGroups(String name) {
this.name = name;
}
public JGroups() {
try {
Thread.currentThread().setName("MyMainThread");
Random rand = new Random();
this.name = ("Node" + rand.nextInt(10));
lockChannel = new JChannel(JGroups.class.getClassLoader().getResource("/resource/udp.xml"));
lockChannel.connect("lock-channel");
communicationChannel = new JChannel(JGroups.class.getClassLoader().getResource("/resource/udp.xml"));
communicationChannel.connect("communication-channel");
lockService = new LockService(lockChannel);
startStatusPrinterThread();
}
catch (Exception ex) {
log.error(ex.getStackTrace().toString());
}
}
public void startAcquiringThread() {
acquiringThread = new Thread() {
@Override
public void run() {
while (true) {
//if you have become Master send your ip every now and then
if (becomeMaster.get()) {
try {
StringBuffer buffer = new StringBuffer("serverip " + serverAddress);
communicationChannel.send(new Message(null, buffer));
}
catch (Exception ex) {
log.error(ex.getStackTrace().toString());
}
} else {
try {
Thread.currentThread().setName(name + "AcquiringThread");
Lock lock = lockService.getLock("serverLock");
if (lock.tryLock(4, TimeUnit.SECONDS)) {
becomeMaster.set(true);
stopListening = true;
/* Now that I'm server I must find out my own ip address on which to listen */
Enumeration<NetworkInterface> networkInterfaces;
try {
networkInterfaces = NetworkInterface.getNetworkInterfaces();
for (NetworkInterface netint : Collections.list(networkInterfaces)) {
Enumeration<InetAddress> inetAddresses = netint.getInetAddresses();
for (InetAddress inetAddress : Collections.list(inetAddresses)) {
if (isIPAddress(inetAddress.getHostAddress())
&& !inetAddress.getHostAddress().equals("127.0.0.1")) {
serverAddress = inetAddress.getHostAddress();
}
}
}
/* I notify to the rest of the program I have correctly initialized
* becomeMaster and serverAddress */
synchronized (finishedLock) {
finishedLock.notify();
}
}
catch (Exception ex) {
log.error(ex.getStackTrace().toString());
System.exit(0);
}
log.info(Thread.currentThread().getName()
+ ": I acquired lock! will become master! my ip is " + serverAddress);
} else {
becomeMaster.set(false);
stopListening = false;
if (listeningThread == null || !listeningThread.isAlive()) {
if (!stopListening) {
//??? this condition might be useless
startListeningThread();
}
}
}
}
catch (Exception e) {
e.printStackTrace();
}
}
try {
sleep(5000L);
}
catch (InterruptedException ex) {
log.error(ex.getStackTrace().toString());
}
}
}
};
acquiringThread.setDaemon(true);
acquiringThread.start();
}
public void startListeningThread() {
listeningThread = new Thread() {
@Override
public void run() {
try {
while (true) {
Thread.currentThread().setName(name + "ListeningThread");
communicationChannel.setReceiver(new ReceiverAdapter() {
@Override
public void receive(Message msg) {
if (msg.getObject() != null) {
String leaderServerAddress = (msg.getObject().toString().substring(9));
if (isIPAddress(leaderServerAddress)) {
serverAddress = leaderServerAddress;
log.info(name + " Master server has ip" + serverAddress);
/* I notify to the rest of the program I have correctly initialized
* becomeMaster and serverAddress */
synchronized (finishedLock) {
finishedLock.notify();
}
} else {
log.info(name + ": discarded message " + msg.getObject().toString());
}
}
}
});
sleep(10000L);
if (stopListening) {
return;
}
}
}
catch (Exception e) {
e.printStackTrace();
}
}
};
listeningThread.setDaemon(true);
listeningThread.start();
}
private void startStatusPrinterThread() {
statusThread = new Thread() {
@Override
public void run() {
Thread.currentThread().setName(name + "StatusPrinterThread");
startAcquiringThread();
while (true) {
try {
if (becomeMaster.get()) {
log.info(name + " startStatusPrinterThread(): I am happily a Master!");
} else {
if (!acquiringThread.isAlive()) {
startAcquiringThread();
}
}
sleep(5000L);
}
catch (InterruptedException e) {
e.printStackTrace();
}
}
}
};
statusThread.setDaemon(true);
statusThread.start();
}
private static boolean isIPAddress(String str) {
Pattern ipPattern = Pattern.compile("^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\."
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\."
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\."
+ "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$");
return ipPattern.matcher(str).matches();
}
}
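A note on the class deleted here: its LockService-based master election only works when the JChannel's protocol stack includes a locking protocol, so the jgroups-l2-cache-udp-largecluster.xml it loaded had to carry an entry near the top of the stack along the lines of

    <CENTRAL_LOCK/>

(availability and placement depend on the JGroups version in use). The plain cache replication introduced by this commit does not need a locking protocol.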

View file

@ -2,10 +2,11 @@ package com.example.crud;
import com.example.crud.entities.*; import com.example.crud.entities.*;
import com.example.crud.repositories.InventoryRepository; import com.example.crud.repositories.InventoryRepository;
import org.datanucleus.util.NucleusLogger; import com.example.crud.repositories.PersonRepository;
import org.infinispan.Cache; import org.datanucleus.api.jpa.JPAEntityManager;
import org.infinispan.configuration.cache.ConfigurationBuilder; import org.datanucleus.state.ObjectProvider;
import org.infinispan.manager.DefaultCacheManager; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.data.jpa.repository.support.JpaRepositoryFactory; import org.springframework.data.jpa.repository.support.JpaRepositoryFactory;
@ -21,66 +22,101 @@ import java.util.Map;
*/ */
public class Main { public class Main {
public static void cacheTest() {
// Construct a simple local cache manager with default configuration
DefaultCacheManager cacheManager = new DefaultCacheManager();
// Define local cache configuration
cacheManager.defineConfiguration("local", new ConfigurationBuilder().build());
// Obtain the local cache
Cache<String, String> cache = cacheManager.getCache("local");
// Register a listener
cache.addListener(new CacheClusterListener());
// Store some values
cache.put("key1", "value1");
cache.put("key2", "value2");
cache.put("key1", "newValue");
// Stop the cache manager and release all resources
cacheManager.stop();
}
public static void main(String args[]) { public static void main(String args[]) {
//cacheTest(); Logger log = LoggerFactory.getLogger(Main.class);//getClass().getName());
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.register(ApplicationConfig.class); ctx.scan("com.example");
ctx.register(ApplicationConfiguration.class);
ctx.refresh(); ctx.refresh();
// Enable MongoDB logging in general
System.setProperty("DEBUG.MONGO", "true");
// Enable DB operation tracing
System.setProperty("DB.TRACE", "true");
// Create an EntityManagerFactory for this "persistence-unit" // Create an EntityManagerFactory for this "persistence-unit"
// See the file "META-INF/persistence.xml" // See the file "META-INF/persistence.xml"
EntityManagerFactory emf = Persistence.createEntityManagerFactory("crud"); EntityManagerFactory cassandraEntityManagerFactory = Persistence.createEntityManagerFactory("crud");
EntityManagerFactory emf_mongo = Persistence.createEntityManagerFactory("mongo"); EntityManagerFactory mongoEntityManagerFactory = Persistence.createEntityManagerFactory("mongo");
//MergingPersistenceUnitmanager
// TODO: // TODO:
// * types: int, bool, etc. // * LOCAL_QUORUM
// * Set<> // * compound primary keys
// * L2/Caching via Infinispan (embedded, clustered) // * pillar for DDL
// * MergingPersistenceUnitmanager // * metrics
// * Draft/(Fluent)Builder Immutable Entites // * com.datastax.driver.core.Cluster.builder().withQueryOptions(new QueryOptions().setConsistencyLevel(ConsistencyLevel.QUORUM))
// * https://github.com/brndnmtthws/metrics-cassandra (c* as a sink for metrics)
// * https://github.com/addthis/metrics-reporter-config
EntityManager em;
EntityTransaction tx;
JpaRepositoryFactory factory;
//org.datanucleus.api.jpa.JPAEntityTransaction tx = (org.datanucleus.api.jpa.JPAEntityTransaction)pm.currentTransaction();
//tx.setOption("transaction.isolation", 2);
// Add a person to MongoDB
em = mongoEntityManagerFactory.createEntityManager();
Person person;
/*
factory = new JpaRepositoryFactory(em);
PersonRepository repository = factory.getRepository(PersonRepository.class);
person = new Person();
person.setPersonFirstName("James");
person.setPersonLastName("Bond");
person.setAge(42);
repository.save(person);
*/
tx = em.getTransaction();
try {
tx.begin();
person = new Person();
person.setPersonFirstName("James");
person.setPersonLastName("Bond");
person.setAge(42);
em.merge(person);
List<ObjectProvider> objs = ((JPAEntityManager) em).getExecutionContext().getObjectsToBeFlushed();
for (Object o : objs) {
log.debug("to be flushed: " + o.toString());
}
tx.commit();
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close(); // This will detach all current managed objects
}
// Persistence of a Product and a Book. // Persistence of a Product and a Book.
EntityManager em = emf.createEntityManager(); em = cassandraEntityManagerFactory.createEntityManager();
EntityTransaction tx = em.getTransaction(); tx = em.getTransaction();
try { try {
tx.begin(); tx.begin();
Inventory inv = em.merge(new Inventory("My Inventory")); Inventory inv = em.merge(new Inventory("My Inventory"));
inv.setDescription("This is my initial description.");
Product product = new Product("Sony Discman", "A standard discman from Sony", 200.00); Product product = new Product("Sony Discman", "A standard discman from Sony", 200.00);
inv.addProduct(product); inv.addProduct(product);
Book book = new Book("Lord of the Rings by Tolkien", "The classic story", 49.99, "JRR Tolkien", Book book = new Book("Lord of the Rings by Tolkien", "The classic story", 49.99, "JRR Tolkien",
"12345678", "MyBooks Factory"); "12345678", "MyBooks Factory");
Magazine magazine = new Magazine("Field and Stream", "A hunter's guide to the outdoors.", 3.29, "F&S, Inc.", "23984729347", "F&S, Inc."); Magazine magazine = new Magazine("Field and Stream", "A hunter's guide to the outdoors.", 3.29, "F&S, Inc.",
"23984729347", "F&S, Inc.");
//product.setSeller(person);
//book.setSeller(person);
//magazine.setSeller(person);
inv.addProduct(book); inv.addProduct(book);
inv.addProduct(magazine); inv.addProduct(magazine);
em.persist(inv); em.persist(inv);
tx.commit(); tx.commit();
// SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(inv);
// SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(product);
// System.out.println("Product and Book have been persisted, inventory: " + inv.getPrimaryKey().toString() + ", product: " + product.getPrimaryKey().toString());
} }
catch (Exception e) { catch (Exception e) {
NucleusLogger.GENERAL.error(">> Exception persisting data", e); log.error(">> Exception persisting data", e);
System.err.println("Error persisting data : " + e.getMessage()); System.err.println("Error persisting data : " + e.getMessage());
return; return;
} finally { } finally {
@ -89,11 +125,11 @@ public class Main {
} }
em.close(); em.close();
} }
emf.getCache().evictAll(); cassandraEntityManagerFactory.getCache().evictAll();
System.out.println(""); System.out.println("");
// Perform a retrieve of the Inventory and detach it (by closing the EM) // Perform a retrieve of the Inventory and detach it (by closing the EM)
em = emf.createEntityManager(); em = cassandraEntityManagerFactory.createEntityManager();
tx = em.getTransaction(); tx = em.getTransaction();
Inventory inv = null; Inventory inv = null;
try { try {
@ -114,7 +150,7 @@ public class Main {
tx.commit(); tx.commit();
} }
catch (Exception e) { catch (Exception e) {
NucleusLogger.GENERAL.error(">> Exception performing find() on data", e); log.error(">> Exception performing find() on data", e);
System.err.println("Error performing find() on data : " + e.getMessage()); System.err.println("Error performing find() on data : " + e.getMessage());
return; return;
} finally { } finally {
@ -128,17 +164,14 @@ public class Main {
} }
System.out.println(""); System.out.println("");
// Add a person to MongoDB // Update a person to MongoDB
em = emf_mongo.createEntityManager(); em = mongoEntityManagerFactory.createEntityManager();
tx = em.getTransaction(); tx = em.getTransaction();
Person person;
try { try {
tx.begin(); tx.begin();
person = new Person(); person = em.find(Person.class, person.getPersonId());
person.setPersonFirstName("James"); person.setPersonLastName("Blunder");
person.setPersonLastName("Bond"); person.setAge(43);
person.setAge(42);
em.merge(person);
tx.commit(); tx.commit();
} finally { } finally {
if (tx.isActive()) { if (tx.isActive()) {
@ -148,7 +181,7 @@ public class Main {
} }
// Perform some query operations // Perform some query operations
em = emf.createEntityManager(); em = cassandraEntityManagerFactory.createEntityManager();
tx = em.getTransaction(); tx = em.getTransaction();
try { try {
tx.begin(); tx.begin();
@ -173,7 +206,7 @@ public class Main {
tx.commit(); tx.commit();
} }
catch (Exception e) { catch (Exception e) {
NucleusLogger.GENERAL.error(">> Exception querying data", e); log.error(">> Exception querying data", e);
System.err.println("Error querying data : " + e.getMessage()); System.err.println("Error querying data : " + e.getMessage());
return; return;
} finally { } finally {
@ -184,16 +217,54 @@ public class Main {
} }
System.out.println(""); System.out.println("");
em = emf.createEntityManager(); em = cassandraEntityManagerFactory.createEntityManager();
JpaRepositoryFactory factory = new JpaRepositoryFactory(em); factory = new JpaRepositoryFactory(em);
InventoryRepository repository = factory.getRepository(InventoryRepository.class); tx = em.getTransaction();
Inventory inventory = repository.findByName("My Inventory"); try {
System.out.println("SpringData/JPA: " + inventory.toString()); tx.begin();
em.close(); InventoryRepository repository = factory.getRepository(InventoryRepository.class);
Inventory inventory = repository.findByName("My Inventory");
System.out.println("SpringData/JPA: " + inventory.toString());
inventory.setDescription("This is my updated description.");
tx.rollback();
}
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}
em = cassandraEntityManagerFactory.createEntityManager();
factory = new JpaRepositoryFactory(em);
tx = em.getTransaction();
try {
tx.begin();
InventoryRepository repository = factory.getRepository(InventoryRepository.class);
Inventory inventory = repository.findByName("My Inventory");
inventory.setDescription("This is the final description.");
repository.save(inventory);
tx.commit();
}
catch (Exception e) {
log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage());
return;
} finally {
if (tx.isActive()) {
tx.rollback();
}
em.close();
}
// Clean out the database // Clean out the database
emf.getCache().evictAll(); cassandraEntityManagerFactory.getCache().evictAll();
em = emf.createEntityManager(); em = cassandraEntityManagerFactory.createEntityManager();
tx = em.getTransaction(); tx = em.getTransaction();
try { try {
tx.begin(); tx.begin();
@ -221,7 +292,7 @@ public class Main {
tx.commit(); tx.commit();
} }
catch (Exception e) { catch (Exception e) {
NucleusLogger.GENERAL.error(">> Exception in bulk delete of data", e); log.error(">> Exception in bulk delete of data", e);
System.err.println("Error in bulk delete of data : " + e.getMessage()); System.err.println("Error in bulk delete of data : " + e.getMessage());
return; return;
} finally { } finally {
@ -233,6 +304,6 @@ public class Main {
System.out.println(""); System.out.println("");
System.out.println("End of Tutorial"); System.out.println("End of Tutorial");
emf.close(); cassandraEntityManagerFactory.close();
} }
} }
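On the LOCAL_QUORUM / QueryOptions items in the TODO list above: with the Datastax Java driver, a default consistency level is set when the Cluster is built. A sketch only; in this project the Cluster comes out of DataNucleus via session().getCluster(), so in practice this would be configured through persistence-unit properties rather than built by hand, and the contact point below is a placeholder:

    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.ConsistencyLevel;
    import com.datastax.driver.core.QueryOptions;

    // Hypothetical direct construction with a LOCAL_QUORUM default for all statements.
    Cluster cluster = Cluster.builder()
            .addContactPoint("127.0.0.1")
            .withQueryOptions(new QueryOptions().setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM))
            .build();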

View file

@ -10,17 +10,23 @@ import java.io.Serializable;
@MappedSuperclass @MappedSuperclass
public abstract class AbstractEntity<ID extends Serializable> { public abstract class AbstractEntity<ID extends Serializable> {
@Version
protected long version;
/*
// SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(inv);
// SpringBeanAutowiringSupport.processInjectionBasedOnCurrentContext(product);
//System.out.println("Product and Book have been persisted, inventory: " + inv.getPrimaryKey().toString() + ", product: " + product.getPrimaryKey().toString());
@Transient @Transient
@Autowired @Autowired
EntityManagerFactory emf; EntityManagerFactory emf;
@Version
protected long version;
public ID getPrimaryKey() { public ID getPrimaryKey() {
final PersistenceUnitUtil util = emf.getPersistenceUnitUtil(); final PersistenceUnitUtil util = emf.getPersistenceUnitUtil();
Object id = util.getIdentifier(this); Object id = util.getIdentifier(this);
return (ID)id; return (ID)id;
} }
*/
} }

View file

@ -29,6 +29,9 @@ public class Book extends Product
@Basic @Basic
private String publisher = null; private String publisher = null;
@Basic
private boolean paperback = false;
/** /**
* Default Constructor. * Default Constructor.
**/ **/

View file

@ -20,6 +20,9 @@ public class Inventory extends AbstractAuditableEntity<String> {
@Id @Id
private String name=null; private String name=null;
@Basic
private String description;
@OneToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE, CascadeType.DETACH }, fetch = FetchType.EAGER) @OneToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE, CascadeType.DETACH }, fetch = FetchType.EAGER)
private Set<Product> products = new HashSet<Product>(); private Set<Product> products = new HashSet<Product>();
@ -42,4 +45,7 @@ public class Inventory extends AbstractAuditableEntity<String> {
products.clear(); products.clear();
} }
public void setDescription(String description) {
this.description = description;
}
} }

View file

@ -51,6 +51,13 @@ public class Person extends AbstractAuditableEntity {
@OneToMany(mappedBy = "seller") @OneToMany(mappedBy = "seller")
private List<Product> products; private List<Product> products;
public Person() {
}
public Person(String personId) {
this.personId = personId;
}
/** /**
* Gets the person id. * Gets the person id.
* *

View file

@ -0,0 +1,229 @@
package com.example.crud.metrics;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
/**
* A reporter class for logging metrics values to a {@link Logger} periodically, similar to
* {@link com.codahale.metrics.ConsoleReporter} or {@link com.codahale.metrics.CsvReporter}, but using
* the logging framework instead. It also supports specifying a {@link Marker} instance that can be used
* by custom appenders and filters for the bound logging toolkit to further process metrics reports.
*/
public final class CoalescingReporter extends ScheduledReporter {
private final Logger logger;
private final Marker marker;
/**
* Returns a new {@link Builder} for {@link CoalescingReporter}.
*
* @param registry the registry to report
* @return a {@link Builder} instance for a {@link CoalescingReporter}
*/
public static Builder forRegistry(MetricRegistry registry) {
return new Builder(registry);
}
private CoalescingReporter(MetricRegistry registry,
Logger logger,
Marker marker,
TimeUnit rateUnit,
TimeUnit durationUnit,
MetricFilter filter) {
super(registry, "logger-reporter", filter, rateUnit, durationUnit);
this.logger = logger;
this.marker = marker;
}
@Override
public void report(SortedMap<String, Gauge> gauges,
SortedMap<String, Counter> counters,
SortedMap<String, Histogram> histograms,
SortedMap<String, Meter> meters,
SortedMap<String, Timer> timers) {
StringBuilder data = new StringBuilder();
for (Entry<String, Gauge> entry : gauges.entrySet()) {
addGauge(data, entry.getKey(), entry.getValue());
}
for (Entry<String, Counter> entry : counters.entrySet()) {
addCounter(data, entry.getKey(), entry.getValue());
}
for (Entry<String, Histogram> entry : histograms.entrySet()) {
addHistogram(data, entry.getKey(), entry.getValue());
}
for (Entry<String, Meter> entry : meters.entrySet()) {
addMeter(data, entry.getKey(), entry.getValue());
}
for (Entry<String, Timer> entry : timers.entrySet()) {
addTimer(data, entry.getKey(), entry.getValue());
}
logger.info(marker, data.toString());
}
private void addTimer(StringBuilder data, String name, Timer timer) {
final Snapshot snapshot = timer.getSnapshot();
data.append(" type=timer.").append(name).append(":");
data.append(" count=").append(timer.getCount());
data.append(", min=").append(convertDuration(snapshot.getMin()));
data.append(", max=").append(convertDuration(snapshot.getMax()));
data.append(", mean=").append(convertDuration(snapshot.getMean()));
data.append(", stdDev=").append(convertDuration(snapshot.getStdDev()));
data.append(", median=").append(convertDuration(snapshot.getMedian()));
data.append(", p75=").append(convertDuration(snapshot.get75thPercentile()));
data.append(", p95=").append(convertDuration(snapshot.get95thPercentile()));
data.append(", p98=").append(convertDuration(snapshot.get98thPercentile()));
data.append(", p99=").append(convertDuration(snapshot.get99thPercentile()));
data.append(", 999=").append(convertDuration(snapshot.get999thPercentile()));
data.append(", mean_rate=").append(convertRate(timer.getMeanRate()));
data.append(", m1=").append(convertRate(timer.getMeanRate()));
data.append(", m5=").append(convertRate(timer.getMeanRate()));
data.append(", m15=").append(convertRate(timer.getMeanRate()));
data.append(", rate_unit=").append(getRateUnit());
data.append(", duration_unit=").append(getDurationUnit());
}
private void addMeter(StringBuilder data, String name, Meter meter) {
data.append(" type=meter.").append(name).append(":");
data.append(" count=").append(meter.getCount());
data.append(", mean_rate=").append(convertRate(meter.getMeanRate()));
data.append(", m1=").append(convertRate(meter.getOneMinuteRate()));
data.append(", m5=").append(convertRate(meter.getFiveMinuteRate()));
data.append(", m15=").append(convertRate(meter.getFifteenMinuteRate()));
data.append(", rate_unit=").append(getRateUnit());
}
private void addHistogram(StringBuilder data, String name, Histogram histogram) {
final Snapshot snapshot = histogram.getSnapshot();
data.append(" type=histogram.").append(name).append(":");
data.append(" count=").append(histogram.getCount());
data.append(", min=").append(snapshot.getMin());
data.append(", max=").append(snapshot.getMax());
data.append(", mean=").append(snapshot.getMean());
data.append(", stdDev=").append(snapshot.getStdDev());
data.append(", median=").append(snapshot.getMedian());
data.append(", p75=").append(snapshot.get75thPercentile());
data.append(", p95=").append(snapshot.get95thPercentile());
data.append(", p98=").append(snapshot.get98thPercentile());
data.append(", p99=").append(snapshot.get99thPercentile());
data.append(", 999=").append(snapshot.get999thPercentile());
}
private void addCounter(StringBuilder data, String name, Counter counter) {
data.append(" counter.").append(name).append(": ").append(counter.getCount());
}
private void addGauge(StringBuilder data, String name, Gauge gauge) {
data.append(" gauge.").append(name).append(": ").append(gauge.getValue());
}
@Override
protected String getRateUnit() {
return "events/" + super.getRateUnit();
}
/**
* A builder for {@link com.codahale.metrics.CsvReporter} instances. Defaults to logging to {@code metrics}, not
* using a marker, converting rates to events/second, converting durations to milliseconds, and
* not filtering metrics.
*/
public static final class Builder {
private final MetricRegistry registry;
private Logger logger;
private Marker marker;
private TimeUnit rateUnit;
private TimeUnit durationUnit;
private MetricFilter filter;
private Builder(MetricRegistry registry) {
this.registry = registry;
this.logger = LoggerFactory.getLogger("metrics");
this.marker = null;
this.rateUnit = TimeUnit.SECONDS;
this.durationUnit = TimeUnit.MILLISECONDS;
this.filter = MetricFilter.ALL;
}
/**
* Log metrics to the given logger.
*
* @param logger a {@link Logger}
* @return {@code this}
*/
public Builder outputTo(Logger logger) {
this.logger = logger;
return this;
}
/**
* Mark all logged metrics with the given marker.
*
* @param marker a {@link Marker}
* @return {@code this}
*/
public Builder markWith(Marker marker) {
this.marker = marker;
return this;
}
/**
* Convert rates to the given time unit.
*
* @param rateUnit a unit of time
* @return {@code this}
*/
public Builder convertRatesTo(TimeUnit rateUnit) {
this.rateUnit = rateUnit;
return this;
}
/**
* Convert durations to the given time unit.
*
* @param durationUnit a unit of time
* @return {@code this}
*/
public Builder convertDurationsTo(TimeUnit durationUnit) {
this.durationUnit = durationUnit;
return this;
}
/**
* Only report metrics which match the given filter.
*
* @param filter a {@link MetricFilter}
* @return {@code this}
*/
public Builder filter(MetricFilter filter) {
this.filter = filter;
return this;
}
/**
* Builds a {@link CoalescingReporter} with the given properties.
*
* @return a {@link CoalescingReporter}
*/
public CoalescingReporter build() {
return new CoalescingReporter(registry, logger, marker, rateUnit, durationUnit, filter);
}
}
}
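For illustration, the reporter can also be started programmatically, outside metrics-spring, using nothing but the Builder defined above; the registry, logger name, and one-minute period are arbitrary choices:

    import com.codahale.metrics.MetricRegistry;
    import org.slf4j.LoggerFactory;
    import java.util.concurrent.TimeUnit;

    MetricRegistry registry = new MetricRegistry();
    CoalescingReporter reporter = CoalescingReporter.forRegistry(registry)
            .outputTo(LoggerFactory.getLogger("metrics"))
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .build();
    reporter.start(1, TimeUnit.MINUTES);   // one coalesced log line per minute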

View file

@ -0,0 +1,38 @@
package com.example.crud.metrics;
import com.ryantenney.metrics.spring.reporter.AbstractReporterElementParser;
/**
* Reporter for metrics-spring which logs more compact, all in one line instead of one line for each metric.
*/
public class CoalescingReporterElementParser extends AbstractReporterElementParser {
private static final String FILTER_REF = "filter-ref";
private static final String FILTER_PATTERN = "filter";
@Override
public String getType() {
return "compact-slf4j";
}
@Override
protected Class<?> getBeanClass() {
return CoalescingReporterFactoryBean.class;
}
@Override
protected void validate(ValidationContext c) {
c.require(CoalescingReporterFactoryBean.PERIOD, DURATION_STRING_REGEX, "Period is required and must be in the form '\\d+(ns|us|ms|s|m|h|d)'");
c.optional(CoalescingReporterFactoryBean.MARKER);
c.optional(CoalescingReporterFactoryBean.LOGGER);
c.optional(CoalescingReporterFactoryBean.RATE_UNIT, TIMEUNIT_STRING_REGEX, "Rate unit must be one of the enum constants from java.util.concurrent.TimeUnit");
c.optional(CoalescingReporterFactoryBean.DURATION_UNIT, TIMEUNIT_STRING_REGEX, "Duration unit must be one of the enum constants from java.util.concurrent.TimeUnit");
c.optional(FILTER_PATTERN);
c.optional(FILTER_REF);
if (c.has(FILTER_PATTERN) && c.has(FILTER_REF)) {
c.reject(FILTER_REF, "Reporter element must not specify both the 'filter' and 'filter-ref' attributes");
}
c.rejectUnmatchedProperties();
}
}

View file

@@ -0,0 +1,54 @@
package com.example.crud.metrics;
import com.ryantenney.metrics.spring.reporter.AbstractScheduledReporterFactoryBean;
import org.slf4j.LoggerFactory;
import org.slf4j.MarkerFactory;
import java.util.concurrent.TimeUnit;
/**
* Factory bean that creates {@link CoalescingReporter} instances for metrics-spring reporter configuration.
*/
public class CoalescingReporterFactoryBean extends AbstractScheduledReporterFactoryBean<CoalescingReporter> {
/** Period attribute. */
public static final String PERIOD = "period";
/** Duration unit. */
public static final String DURATION_UNIT = "duration-unit";
/** Rate unit. */
public static final String RATE_UNIT = "rate-unit";
/** Marker. */
public static final String MARKER = "marker";
/** Logger. */
public static final String LOGGER = "logger";
@Override
public Class<CoalescingReporter> getObjectType() {
return CoalescingReporter.class;
}
@Override
protected CoalescingReporter createInstance() {
final CoalescingReporter.Builder reporter = CoalescingReporter.forRegistry(getMetricRegistry());
if (hasProperty(DURATION_UNIT)) {
reporter.convertDurationsTo(getProperty(DURATION_UNIT, TimeUnit.class));
}
if (hasProperty(RATE_UNIT)) {
reporter.convertRatesTo(getProperty(RATE_UNIT, TimeUnit.class));
}
reporter.filter(getMetricFilter());
if (hasProperty(MARKER)) {
reporter.markWith(MarkerFactory.getMarker(getProperty(MARKER)));
}
if (hasProperty(LOGGER)) {
reporter.outputTo(LoggerFactory.getLogger(getProperty(LOGGER)));
}
return reporter.build();
}
@Override
protected long getPeriod() {
return convertDurationString(getProperty(PERIOD));
}
}

View file

@@ -0,0 +1,151 @@
package com.example.crud.metrics;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.util.ContextInitializer;
import ch.qos.logback.core.joran.spi.JoranException;
import com.codahale.metrics.*;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.codahale.metrics.jvm.*;
import com.codahale.metrics.logback.InstrumentedAppender;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import org.datanucleus.store.StoreManager;
import org.datanucleus.store.connection.ManagedConnection;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
import java.lang.management.ManagementFactory;
import java.util.concurrent.TimeUnit;
@Configuration
@EnableMetrics(proxyTargetClass = true)
public class MetricsConfiguration extends MetricsConfigurerAdapter {
private static final String PROP_METRIC_REG_JVM_MEMORY = "jvm.memory";
private static final String PROP_METRIC_REG_JVM_GARBAGE = "jvm.garbage";
private static final String PROP_METRIC_REG_JVM_THREADS = "jvm.threads";
private static final String PROP_METRIC_REG_JVM_FILES = "jvm.files";
private static final String PROP_METRIC_REG_JVM_BUFFERS = "jvm.buffers";
private final Logger log = (Logger)LoggerFactory.getLogger(getClass().getName());
private MetricRegistry metricRegistry = new MetricRegistry();
private HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();
@Autowired
ApplicationContext context;
@Override
@Bean
public MetricRegistry getMetricRegistry() {
return metricRegistry;
}
@Override
@Bean
public HealthCheckRegistry getHealthCheckRegistry() {
return healthCheckRegistry;
}
@PostConstruct
public void init() {
/*
final LoggerContext factory = (LoggerContext) LoggerFactory.getILoggerFactory();
final Logger root = factory.getLogger("console");//Logger.ROOT_LOGGER_NAME);
final InstrumentedAppender metrics = new InstrumentedAppender();
metrics.setContext(root.getLoggerContext());
metrics.start();
root.addAppender(metrics);
*/
// Reset logback and re-run auto-configuration so the logging setup is reloaded from the classpath.
LoggerContext loggerContext = (LoggerContext)LoggerFactory.getILoggerFactory();
loggerContext.reset();
ContextInitializer initializer = new ContextInitializer(loggerContext);
try {
initializer.autoConfig();
} catch (JoranException e) {
e.printStackTrace();
}
log.debug("Registering JVM gauges");
metricRegistry.registerAll(new OperatingSystemGaugeSet());
metricRegistry.register(PROP_METRIC_REG_JVM_MEMORY, new MemoryUsageGaugeSet());
metricRegistry.register(PROP_METRIC_REG_JVM_GARBAGE, new GarbageCollectorMetricSet());
metricRegistry.register(PROP_METRIC_REG_JVM_THREADS, new ThreadStatesGaugeSet());
metricRegistry.register(PROP_METRIC_REG_JVM_FILES, new FileDescriptorRatioGauge());
metricRegistry.register(PROP_METRIC_REG_JVM_BUFFERS,
new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer()));
// Bridge the Cassandra driver's own metrics into the application registry under a "com.datastax." prefix.
Cluster cluster = context.getBean(Cluster.class);
cluster.getMetrics().getRegistry().addListener(
new com.codahale.metrics.MetricRegistryListener() {
private final String METRIC_NAME_PREFIX = "com.datastax.";
@Override
public void onGaugeAdded(String name, Gauge<?> gauge) {
//if (metricRegistry.getNames().contains(name)) {
// name is already taken, maybe prefix with a namespace
//} else {
metricRegistry.register(METRIC_NAME_PREFIX + name, gauge);
}
@Override
public void onGaugeRemoved(String name) {
}
@Override
public void onCounterAdded(String name, Counter counter) {
metricRegistry.register(METRIC_NAME_PREFIX + name, counter);
}
@Override
public void onCounterRemoved(String name) {
}
@Override
public void onHistogramAdded(String name, Histogram histogram) {
metricRegistry.register(METRIC_NAME_PREFIX + name, histogram);
}
@Override
public void onHistogramRemoved(String name) {
}
@Override
public void onMeterAdded(String name, Meter meter) {
metricRegistry.register(METRIC_NAME_PREFIX + name, meter);
}
@Override
public void onMeterRemoved(String name) {
}
@Override
public void onTimerAdded(String name, Timer timer) {
metricRegistry.register(METRIC_NAME_PREFIX + name, timer);
}
@Override
public void onTimerRemoved(String name) {
}
});
CoalescingReporter reporter = CoalescingReporter.forRegistry(metricRegistry)
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.build();
reporter.start(5, TimeUnit.SECONDS);
}
}
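With @EnableMetrics(proxyTargetClass = true) in place, any Spring bean can feed the registry declaratively; the service class and metric names below are illustrative assumptions, not code from this commit.
// Illustrative only: a Spring bean whose methods are instrumented by metrics-spring.
import com.codahale.metrics.annotation.Metered;
import com.codahale.metrics.annotation.Timed;
import org.springframework.stereotype.Service;

@Service
public class InventoryMetricsExample {
    @Timed(name = "inventory.lookup")   // records a timer per invocation
    public void lookup(String name) {
        // ... repository call elided ...
    }

    @Metered(name = "inventory.save")   // records a meter per invocation
    public void save(String name) {
        // ... repository call elided ...
    }
}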

View file

@@ -0,0 +1,125 @@
package com.example.crud.metrics;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
* A set of gauges for operating-system statistics (memory, swap, CPU time/load, file descriptors).
*/
public class OperatingSystemGaugeSet implements MetricSet {
private final OperatingSystemMXBean mxBean;
private final Optional<Method> committedVirtualMemorySize;
private final Optional<Method> totalSwapSpaceSize;
private final Optional<Method> freeSwapSpaceSize;
private final Optional<Method> processCpuTime;
private final Optional<Method> freePhysicalMemorySize;
private final Optional<Method> totalPhysicalMemorySize;
private final Optional<Method> openFileDescriptorCount;
private final Optional<Method> maxFileDescriptorCount;
private final Optional<Method> systemCpuLoad;
private final Optional<Method> processCpuLoad;
/**
* Creates new gauges using the platform OS bean.
*/
public OperatingSystemGaugeSet() {
this(ManagementFactory.getOperatingSystemMXBean());
}
/**
* Creates new gauges using the given OS bean.
*
* @param mxBean an {@link OperatingSystemMXBean}
*/
public OperatingSystemGaugeSet(OperatingSystemMXBean mxBean) {
this.mxBean = mxBean;
committedVirtualMemorySize = getMethod("getCommittedVirtualMemorySize");
totalSwapSpaceSize = getMethod("getTotalSwapSpaceSize");
freeSwapSpaceSize = getMethod("getFreeSwapSpaceSize");
processCpuTime = getMethod("getProcessCpuTime");
freePhysicalMemorySize = getMethod("getFreePhysicalMemorySize");
totalPhysicalMemorySize = getMethod("getTotalPhysicalMemorySize");
openFileDescriptorCount = getMethod("getOpenFileDescriptorCount");
maxFileDescriptorCount = getMethod("getMaxFileDescriptorCount");
systemCpuLoad = getMethod("getSystemCpuLoad");
processCpuLoad = getMethod("getProcessCpuLoad");
}
@Override
public Map<String, Metric> getMetrics() {
final Map<String, Metric> gauges = new HashMap<>();
gauges.put("committedVirtualMemorySize", (Gauge<Long>) () -> invokeLong(committedVirtualMemorySize));
gauges.put("totalSwapSpaceSize", (Gauge<Long>) () -> invokeLong(totalSwapSpaceSize));
gauges.put("freeSwapSpaceSize", (Gauge<Long>) () -> invokeLong(freeSwapSpaceSize));
gauges.put("processCpuTime", (Gauge<Long>) () -> invokeLong(processCpuTime));
gauges.put("freePhysicalMemorySize", (Gauge<Long>) () -> invokeLong(freePhysicalMemorySize));
gauges.put("totalPhysicalMemorySize", (Gauge<Long>) () -> invokeLong(totalPhysicalMemorySize));
gauges.put("fd.usage", (Gauge<Double>) () -> invokeRatio(openFileDescriptorCount, maxFileDescriptorCount));
gauges.put("systemCpuLoad", (Gauge<Double>) () -> invokeDouble(systemCpuLoad));
gauges.put("processCpuLoad", (Gauge<Double>) () -> invokeDouble(processCpuLoad));
return gauges;
}
private Optional<Method> getMethod(String name) {
try {
final Method method = mxBean.getClass().getDeclaredMethod(name);
method.setAccessible(true);
return Optional.of(method);
} catch (NoSuchMethodException e) {
return Optional.empty();
}
}
private long invokeLong(Optional<Method> method) {
if (method.isPresent()) {
try {
return (long) method.get().invoke(mxBean);
} catch (IllegalAccessException | InvocationTargetException ite) {
return 0L;
}
}
return 0L;
}
private double invokeDouble(Optional<Method> method) {
if (method.isPresent()) {
try {
return (double) method.get().invoke(mxBean);
} catch (IllegalAccessException | InvocationTargetException ite) {
return 0.0;
}
}
return 0.0;
}
private double invokeRatio(Optional<Method> numeratorMethod, Optional<Method> denominatorMethod) {
if (numeratorMethod.isPresent() && denominatorMethod.isPresent()) {
try {
long numerator = (long) numeratorMethod.get().invoke(mxBean);
long denominator = (long) denominatorMethod.get().invoke(mxBean);
if (0 == denominator) {
return Double.NaN;
}
return 1.0 * numerator / denominator;
} catch (IllegalAccessException | InvocationTargetException ite) {
return Double.NaN;
}
}
return Double.NaN;
}
}
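A short standalone sketch of using the gauge set above; the wrapper class is illustrative, and only the gauge names defined in getMetrics() are assumed.
// Illustrative standalone use of OperatingSystemGaugeSet (not part of the commit).
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.example.crud.metrics.OperatingSystemGaugeSet;

public class OsGaugesExample {
    public static void main(String[] args) {
        MetricRegistry registry = new MetricRegistry();
        registry.registerAll(new OperatingSystemGaugeSet());
        // Gauge names match the keys built in getMetrics(), e.g. "systemCpuLoad" and "fd.usage".
        Gauge<?> cpuLoad = registry.getGauges().get("systemCpuLoad");
        System.out.println("systemCpuLoad=" + cpuLoad.getValue());
    }
}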

View file

@@ -0,0 +1,22 @@
package com.example.crud.metrics;
import java.util.concurrent.TimeUnit;
import org.springframework.context.annotation.Configuration;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricRegistry;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
@Configuration
@EnableMetrics
public class SpringConfiguringClass extends MetricsConfigurerAdapter {
@Override
public void configureReporters(MetricRegistry metricRegistry) {
// registerReporter allows the MetricsConfigurerAdapter to
// shut down the reporter when the Spring context is closed
registerReporter(ConsoleReporter.forRegistry(metricRegistry).build())
.start(1, TimeUnit.MINUTES);
}
}
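If the compact reporter were preferred here, the same hook could register it so the adapter still owns its lifecycle; a hedged variant with an illustrative class name, not what the commit does.
// Variant sketch: register the CoalescingReporter instead of the ConsoleReporter.
import com.codahale.metrics.MetricRegistry;
import com.example.crud.metrics.CoalescingReporter;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import org.springframework.context.annotation.Configuration;
import java.util.concurrent.TimeUnit;

@Configuration
@EnableMetrics
public class CompactReporterConfigExample extends MetricsConfigurerAdapter {
    @Override
    public void configureReporters(MetricRegistry metricRegistry) {
        // registerReporter lets the adapter close the reporter with the Spring context.
        registerReporter(CoalescingReporter.forRegistry(metricRegistry)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build())
                .start(1, TimeUnit.MINUTES);
    }
}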

View file

@@ -1,8 +1,6 @@
package com.example.crud.repositories; package com.example.crud.repositories;
import com.codahale.metrics.annotation.Metered;
import com.example.crud.entities.Inventory; import com.example.crud.entities.Inventory;
import io.astefanutti.metrics.aspectj.Metrics;
import org.datanucleus.api.jpa.annotations.ReadOnly; import org.datanucleus.api.jpa.annotations.ReadOnly;
import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable; import org.springframework.cache.annotation.Cacheable;
@@ -12,6 +10,7 @@ import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param; import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import java.beans.Transient;
import java.util.List; import java.util.List;
//@Metrics(registry = "${this.registry}") //@Metrics(registry = "${this.registry}")
@@ -31,4 +30,7 @@ public interface InventoryRepository extends JpaRepository<Inventory, String>, J
@CacheEvict(value = "inventory", key = "#name") @CacheEvict(value = "inventory", key = "#name")
void deleteInventoryBy(String name); void deleteInventoryBy(String name);
@Transactional
//CacheEvict(value = "inventory", key = "#name")
<S extends String> S save(S s);
} }

View file

@@ -0,0 +1,8 @@
package com.example.crud.repositories;
import com.example.crud.entities.Inventory;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
public interface PersonRepository extends JpaRepository<Inventory, String>, JpaSpecificationExecutor {
}

View file

@@ -46,6 +46,7 @@
<basic name="description"> <basic name="description">
<column length="255"/> <column length="255"/>
</basic> </basic>
<one-to-many name="seller"/>
</attributes> </attributes>
</entity> </entity>
@@ -63,6 +64,9 @@
<basic name="publisher"> <basic name="publisher">
<column name="PUBLISHER" length="40"/> <column name="PUBLISHER" length="40"/>
</basic> </basic>
<basic name="paperback">
<column name="PAPERBACK" nullable="false"/>
</basic>
</attributes> </attributes>
</entity> </entity>

View file

@@ -15,7 +15,7 @@
</properties> </properties>
</persistence-unit> </persistence-unit>
<persistence-unit name="crud"> <persistence-unit name="crud" transaction-type="RESOURCE_LOCAL">
<class>com.example.crud.entities.Inventory</class> <class>com.example.crud.entities.Inventory</class>
<class>com.example.crud.entities.Product</class> <class>com.example.crud.entities.Product</class>
<class>com.example.crud.entities.Book</class> <class>com.example.crud.entities.Book</class>
@@ -25,10 +25,12 @@
<properties> <properties>
<property name="javax.persistence.jdbc.url" value="cassandra:"/> <property name="javax.persistence.jdbc.url" value="cassandra:"/>
<property name="datanucleus.mapping.Schema" value="kc"/> <property name="datanucleus.mapping.Schema" value="kc"/>
<property name="datanucleus.cache.level2.type" value="jcache"/> <property name="datanucleus.flush.mode" value="MANUAL"/>
<property name="datanucleus.cache.level2.cacheName" value="dist"/> <property name="datanucleus.datastoreTransactionFlushLimit" value="100"/>
<property name="datanucleus.cache.level2.timeout" value="10000"/>
<property name="datanucleus.schema.autoCreateAll" value="true"/> <property name="datanucleus.schema.autoCreateAll" value="true"/>
<property name="datanucleus.cache.level2.type" value="ehcache"/>
<property name="datanucleus.cache.level2.cacheName" value="crud"/>
<property name="datanucleus.cassandra.metrics" value="true"/>
</properties> </properties>
</persistence-unit> </persistence-unit>
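The same level-2 cache switches can also be passed programmatically when bootstrapping the "crud" unit; a minimal sketch (the wrapper class is illustrative) using only property names and values that appear above.
// Sketch: supplying the DataNucleus L2-cache properties at EntityManagerFactory creation time.
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import java.util.HashMap;
import java.util.Map;

public class CrudEmfBootstrapExample {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("datanucleus.cache.level2.type", "ehcache");   // same values as persistence.xml
        props.put("datanucleus.cache.level2.cacheName", "crud");
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("crud", props);
        System.out.println("L2 cache active: " + (emf.getCache() != null));
        emf.close();
    }
}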

View file

@@ -1,13 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://ehcache.org/ehcache.xsd" name="webCache" > <ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<diskStore path="java.io.tmpdir/webCache"/> xsi:noNamespaceSchemaLocation="http://ehcache.org/ehcache.xsd" name="crudCache">
<cacheManagerPeerProviderFactory class="net.sf.ehcache.distribution.jgroups.JGroupsCacheManagerPeerProviderFactory" properties="jgroups-l2-cache-udp-largecluster.xml"/> <diskStore path="java.io.tmpdir/crudCache"/>
<cache name="crud" <cacheManagerPeerProviderFactory class="net.sf.ehcache.distribution.jgroups.JGroupsCacheManagerPeerProviderFactory"
maxElementsInMemory="1000" properties="jgroups-l2-cache-udp-largecluster.xml"/>
eternal="false" <!-- Documentation at: https://github.com/ehcache/ehcache.org-site/blob/master/ehcache.xml -->
timeToIdleSeconds="600" <cache name="crud"
timeToLiveSeconds="6000" eternal="false"
overflowToDisk="true"> maxBytesLocalHeap="1g"
<cacheEventListenerFactory class="net.sf.ehcache.distribution.jgroups.JGroupsCacheReplicatorFactory" properties="replicateAsynchronously=true,replicatePuts=true,replicateUpdates=true,replicateUpdatesViaCopy=false,replicateRemovals=true" /> maxBytesLocalDisk="200g"
</cache> memoryStoreEvictionPolicy="LRU"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
overflowToDisk="true"
transactionalMode="off">
<cacheEventListenerFactory class="net.sf.ehcache.distribution.jgroups.JGroupsCacheReplicatorFactory"
properties="replicateAsynchronously=true,replicatePuts=true,replicateUpdates=true,replicateUpdatesViaCopy=false,replicateRemovals=true"/>
</cache>
</ehcache> </ehcache>

View file

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<infinispan
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="urn:infinispan:config:9.1 http://www.infinispan.org/schemas/infinispan-config-9.1.xsd"
xmlns="urn:infinispan:config:9.1">
<jgroups>
<stack-file name="udp" path="jgroups.xml" />
</jgroups>
<cache-container default-cache="default">
<transport stack="udp" node-name="${nodeName}" />
<replicated-cache name="repl" mode="SYNC" />
<distributed-cache name="dist" mode="SYNC" owners="2" />
</cache-container>
</infinispan>

View file

@@ -17,92 +17,58 @@
</filter> </filter>
</appender> </appender>
<category name="com.example.crud"> <appender name="consoleasync" class="ch.qos.logback.classic.AsyncAppender">
<priority value="TRACE"/> <queueSize>10000</queueSize>
</category> <discardingThreshold>0</discardingThreshold>
<logger name="com.mchange.v2.resourcepool" level="INFO">
<appender-ref ref="console"/> <appender-ref ref="console"/>
</logger> </appender>
<logger name="com.mchange.v2.c3p0" level="INFO"> <category name="com.example.crud" level="TRACE" additivity="true"/>
<appender-ref ref="console"/>
</logger>
<logger name="org.logicalcobwebs.proxool" level="INFO">
<appender-ref ref="console"/>
</logger>
<logger name="DataNucleus" level="TRACE">
<appender-ref ref="console"/>
</logger>
<logger name="org.springframework" level="TRACE"> <logger name="com.mchange.v2.resourcepool" level="INFO"/>
<level value="trace" />
<appender-ref ref="console"/>
</logger>
<logger name="org.springdata.cassandra" level="TRACE"> <logger name="com.mchange.v2.c3p0" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.Cluster" level="INFO"> <logger name="org.logicalcobwebs.proxool" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.Session" level="INFO"> <logger name="DataNucleus" level="TRACE"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.RequestHandler" level="TRACE"> <logger name="org.springframework" level="TRACE"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.Connection" level="INFO"> <logger name="org.springframework.cache" level="Trace"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.Message" level="INFO"> <logger name="org.springdata.cassandra" level="TRACE"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.QueryLogger.SLOW" level="INFO"> <logger name="com.datastax.driver.core.Cluster" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.QueryLogger.NORMAL" level="INFO"> <logger name="com.datastax.driver.core.Session" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.QueryLogger.FAST" level="INFO"> <logger name="com.datastax.driver.core.RequestHandler" level="TRACE"/>
<appender-ref ref="console"/>
</logger>
<logger name="com.datastax.driver.core.QueryLogger.ERROR" level="INFO"> <logger name="com.datastax.driver.core.Connection" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<category name="org.infinispan"> <logger name="com.datastax.driver.core.Message" level="INFO"/>
<priority value="TRACE"/>
</category>
<category name="org.jgroups"> <logger name="com.datastax.driver.core.QueryLogger.SLOW" level="INFO"/>
<priority value="WARN"/>
</category>
<logger name="org.mongodb.driver" level="TRACE"> <logger name="com.datastax.driver.core.QueryLogger.NORMAL" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<logger name="net.spy.memcached" level="TRACE"> <logger name="com.datastax.driver.core.QueryLogger.FAST" level="INFO"/>
<appender-ref ref="console"/>
</logger>
<!-- Root logger <logger name="com.datastax.driver.core.QueryLogger.ERROR" level="INFO"/>
<root>
<level value="info" /> <logger name="net.sf.ehcache" level="TRACE"/>
<appender-ref ref="console" />
</root> <logger name="org.jgroups" level="TRACE"/>
-->
<logger name="org.mongodb.driver" level="TRACE"/>
<logger name="net.spy.memcached" level="TRACE"/>
<!-- Root logger -->
<root>
<level value="info" />
<appender-ref ref="consoleasync" />
</root>
</configuration> </configuration>