Initial import of functional, work in progress, example code.

This commit is contained in:
Greg Burd 2017-04-11 13:14:16 -04:00
commit 123658753e
55 changed files with 6409 additions and 0 deletions

57
.gitignore vendored Normal file
View file

@ -0,0 +1,57 @@
*.iws
*.ipr
*.iml
*.log
.DS_Store
*.swp
*.orig
*.xmt
*.jou
*.pyc
*.prefs
*~
#*#
gradle
.gradle
run-jetty-run/
license.err
test/client/test-results.xml
.idea/
node_modules/
bower_components/
/projectFilesBackup/*
logs/*
!.gitkeep
coverage/*
/ios
xcuserdata/
/buildSrc/build/
services/host.properties
erl_crash.dump
.build_config
/stage/
.tmp/
jspm_packages/
typings/
/cppGraphics/
.vscode/
.metadata/
RemoteSystemsTempFiles/
*.mo
.project
.classpath
.settings
project/eclipse/build/
.eslintcache
.buckconfig
.buckconfig.local
.okbuck
buckw
buck-out
build
out
infer-out
.buckd
.watchmanconfig
*.class
/bin/

18
Makefile Normal file
View file

@ -0,0 +1,18 @@
gradlew:
gradle gradlew
lock:
./gradlew clean generateLock saveLock
run:
./gradlew bootRun
all:
./gradlew build
clean:
gradle clean
infer:
infer -- ./gradlew build

114
README.txt Normal file
View file

@ -0,0 +1,114 @@
First, download the latest release of Cockroach Database from http://cockroachlabs.com/ for your platform. I rename the
executable to 'crdb' to keep things crisp. Then start the server on your developer host, I run it in /tmp just because.
$ cd /tmp
$ crdb start --background --http-port=9090
CockroachDB node starting at 2017-03-16 15:04:25.280278938 -0400 EDT
build: CCL beta-20170309 @ 2017/03/09 16:34:35 (go1.8)
admin: http://localhost:9090
sql: postgresql://root@localhost:26257?sslmode=disable
logs: cockroach-data/logs
store[0]: path=cockroach-data
status: restarted pre-existing node
clusterID: 59fcea0b-e206-44e2-9071-f8164a52bb6b
nodeID: 1
First:
$ gradle gradlew
To change jvm arguments:
$ ./gradlew bootRun -PjvmArgs="-Dwhatever1=value1 -Dwhatever2=value2"
To rebuild the lock file:
$ ./gradlew clean generateLock saveLock
Otherwise just:
$ ./gradlew bootRun
Basic tests are in the file 'test-api'; first install HTTPie and the JSON processor jq.
$ brew install httpie jq
Then run the tests:
$ ./test-api
Of course you can exercise the API with curl:
$ # Create:
$ curl -i -X POST -H "Content-Type:application/json" -d '{ "firstName" : "Karl", "lastName" : "Penzhorn" }' localhost:8443/persons
HTTP/1.1 201
Location: http://localhost:8443/persons/215677213022060545
Content-Type: application/hal+json;charset=UTF-8
Transfer-Encoding: chunked
Date: Tue, 31 Jan 2017 19:11:02 GMT
{
"firstName" : "Karl",
"lastName" : "Penzhorn",
"_links" : {
"self" : {
"href" : "http://localhost:8443/persons/215677213022060545"
},
"person" : {
"href" : "http://localhost:8443/persons/215677213022060545"
}
}
}
$ # Read:
$ curl localhost:8443/persons/215677213022060545
{
"firstName" : "Karl",
"lastName" : "Penzhorn",
"_links" : {
"self" : {
"href" : "http://localhost:8443/persons/215677213022060545"
},
"person" : {
"href" : "http://localhost:8443/persons/215677213022060545"
}
}
}
$ # Update:
$ curl -i -X PUT -H "Content-Type:application/json" -d '{ "firstName" : "Karl", "lastName" : "Zen" }' localhost:8443/persons/215677213022060545
HTTP/1.1 200
Location: http://localhost:8443/persons/215677213022060545
Content-Type: application/hal+json;charset=UTF-8
Transfer-Encoding: chunked
Date: Tue, 31 Jan 2017 19:18:22 GMT
{
"firstName" : "Karl",
"lastName" : "Zen",
"_links" : {
"self" : {
"href" : "http://localhost:8443/persons/215677213022060545"
},
"person" : {
"href" : "http://localhost:8443/persons/215677213022060545"
}
}
}
$
$ # Delete:
$ curl -i -X DELETE localhost:8443/persons/215677213022060545
HTTP/1.1 204
Date: Tue, 31 Jan 2017 19:14:51 GMT
$ curl localhost:8443/persons/215677213022060545
$
You can update the log levels at runtime.
$ curl http://localhost:8443/loggers
Then, to change the ROOT configured level to TRACE you'd simply:
$ curl -i -X POST -H 'Content-Type: application/json' -d '{"configuredLevel": "TRACE"}' http://localhost:8080/loggers/ROOT
or:
$ HTTP POST localhost:8080/loggers/ROOT configuredLevel:TRACE
Also, you can get the set of REST endpoints mapped (not including model objects) via:
curl http://localhost:8443/mappings | jq

215
build.gradle Normal file
View file

@ -0,0 +1,215 @@
// gradle wrapper
// ./gradlew dependencies --configuration compile
// ./gradlew clean generateLock saveLock
// ./gradlew compileJava
// ./gradlew run
// ./gradlew run --debug-jvm
buildscript {
ext { }
repositories {
jcenter()
mavenLocal()
mavenCentral()
maven { url "https://clojars.org/repo" }
maven { url "https://plugins.gradle.org/m2/" }
maven { url "https://repo.spring.io/milestone" } //maven { url "https://repo.spring.io/snapshot" }
}
dependencies {
classpath("org.springframework.boot:spring-boot-gradle-plugin:1.5.2.RELEASE")
classpath 'com.netflix.nebula:gradle-dependency-lock-plugin:4.+'
// classpath 'se.transmode.gradle:gradle-docker:1.2'
classpath 'com.uber:okbuck:0.19.0'
}
}
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'eclipse'
apply plugin: 'application'
apply plugin: 'org.springframework.boot'
apply plugin: "io.spring.dependency-management"
apply plugin: 'com.uber.okbuck'
apply plugin: 'nebula.dependency-lock'
//TODO(gburd): apply plugin: 'org.flywaydb.flyway', version "4.1.2"
//TODO(gburd): apply plugin: 'docker'
task wrapper(type: Wrapper) {
gradleVersion = '3.4.1'
}
applicationDefaultJvmArgs = ["-Dgreeting.language=en"]
jar {
baseName = 'crud'
version = '0.0.2-SNAPSHOT'
}
bootRun {
if (project.hasProperty('jvmArgs')) {
jvmArgs = (project.jvmArgs.split("\\s+") as List)
} else {
jvmArgs = ["-server",
"-Xms1g",
"-Xmx1g",
"-XX:NewRatio=3",
"-Xss16m",
"-XX:+UseConcMarkSweepGC",
"-XX:+CMSParallelRemarkEnabled",
"-XX:ConcGCThreads=4",
"-XX:ReservedCodeCacheSize=240m",
"-XX:+AlwaysPreTouch",
"-XX:+TieredCompilation",
"-XX:+UseCompressedOops",
"-XX:SoftRefLRUPolicyMSPerMB=50",
"-Dsun.io.useCanonCaches=false",
"-Djava.net.preferIPv4Stack=true",
"-Djsse.enableSNIExtension=false",
"-ea",
"-XX:+UseAdaptiveGCBoundary",
"-XX:CompileThreshold=10000",
"-XX:+OptimizeStringConcat",
"-XX:+UseFastAccessorMethods",
"-XX:+HeapDumpOnOutOfMemoryError",
"-XX:-OmitStackTraceInFastThrow",
"-XX:MaxJavaStackTraceDepth=-1",
"-XX:+UseCompressedOops"]
}
}
sourceCompatibility = 1.8
targetCompatibility = 1.8
repositories {
jcenter()
mavenLocal()
mavenCentral()
maven { url "https://clojars.org/repo" }
maven { url "https://repo.spring.io/milestone" } //maven { url "https://repo.spring.io/snapshot" }
}
configurations {
querydslapt
compile.exclude module: 'spring-boot-starter-tomcat'
compile.exclude module: 'tomcat-jdbc'
}
configurations.all {
exclude group: 'org.hibernate', module: 'hibernate-entitymanager'
exclude group: 'org.hibernate', module: 'hibernate-core'
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'log4j', module: 'log4j'
}
dependencies {
// Some Java Extras
compile group: 'javax.inject', name: 'javax.inject', version: '1'
// Apache Commons
compile group: 'commons-codec', name: 'commons-codec', version: '1.+'
compile group: 'org.apache.commons', name: 'commons-lang3', version: '3.+'
// Google Guava, "For all the Goodness(TM)"
compile group: 'com.google.guava', name: 'guava', version: '21.+'
// Joda Time, "Because time is hard(TM)"
compile group: 'joda-time', name: 'joda-time', version: '2.+'
// Lombok, "Where less is more(TM)"
compile group: 'org.projectlombok', name: 'lombok'
// AOP
compile group: 'org.aspectj', name: 'aspectjweaver', version: '1.+'
// All the Spring, for all the things (e.g. a REST web application with a SQL database)
compile group: 'org.springframework', name: 'spring-core'
compile group: 'org.springframework', name: 'spring-beans'
compile group: 'org.springframework', name: 'spring-context'
compile group: 'org.springframework', name: 'spring-tx'
compile group: 'org.springframework.data', name: 'spring-data-jpa'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-undertow'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-data-rest'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-data-jpa'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-actuator'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-cache'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-aop'
compile group: 'org.springframework.boot', name: 'spring-boot-starter-web'
compile group: 'com.querydsl', name: 'querydsl-jpa', version: '4.+'
querydslapt group: 'com.mysema.querydsl', name: 'querydsl-apt', version: '3.+' // QueryDsl Weaving
// Entity Storage (aka. txn{CRUD}), "Things necessary for data persistence"
// - CockroachDB speaks the PostgreSQL wire protocol
// - EclipseLink (ORM)
// - UUID/Primary Keys (Fast, coordination-free, decentralized, k-ordered unique ID generator)
// - Flake ID generation
// - Flyway, "Change happens(TM)"
compile group: 'org.postgresql', name: 'postgresql'
compile group: 'org.eclipse.persistence', name: 'eclipselink', version: '2.+'
compile group: 'org.eclipse.persistence', name: 'javax.persistence', version: '2.+'
compile group: 'org.eclipse.persistence', name: 'org.eclipse.persistence.jpa', version: '2.+'
compile group: 'com.github.rholder.fauxflake', name: 'fauxflake-core', version: '1.+'
// compile group: 'org.flywaydb', name: 'flyway-core'
// Dropwizard (aka. CodaHale) Metrics, "Measure all the things!(TM)"
compile group: 'io.dropwizard.metrics', name: 'metrics-core', version: '3.+'
compile group: 'io.dropwizard.metrics', name: 'metrics-jvm', version: '3.+'
compile group: 'io.dropwizard.metrics', name: 'metrics-healthchecks', version: '3.+'
compile group: 'io.dropwizard.metrics', name: 'metrics-servlets', version: '3.+'
compile group: 'io.dropwizard.metrics', name: 'metrics-jetty9', version: '3.+'
compile group: 'io.dropwizard.metrics', name: 'metrics-graphite', version: '3.+'
compile group: 'com.ryantenney.metrics', name: 'metrics-spring', version: '3.+'
compile group: 'io.riemann', name: 'metrics3-riemann-reporter', version: '0.+'
compile group: 'defunkt', name: 'logback-riemann-appender', version: '0.+'
// Logging for Java, "Visibility proceeds insight(TM)"
compile group: 'ch.qos.logback', name: 'logback-core', version: '1.+'
compile group: 'ch.qos.logback', name: 'logback-classic', version: '1.+'
compile group: 'org.zalando', name: 'logbook-servlet', version: '1.+'
compile group: 'org.zalando', name: 'logbook-httpclient', version: '1.+'
compile group: 'org.zalando', name: 'logbook-spring-boot-starter', version: '1.+'
compile group: 'org.slf4j', name: 'jcl-over-slf4j', version: '1.+'
compile group: 'org.slf4j', name: 'log4j-over-slf4j', version: '1.+'
// Testing
testCompile group: 'junit', name: 'junit', version: '4.+'
testCompile group: 'org.mockito', name: 'mockito-all', version: '1.+'
testCompile group: 'org.assertj', name: 'assertj-core', version: '2.+'
testCompile group: 'com.github.stefanbirkner', name: 'system-rules', version: '1.+'
testCompile group: 'nl.jqno.equalsverifier', name: 'equalsverifier', version: '1.+'
testCompile group: 'org.hamcrest', name: 'hamcrest-core', version: '1.3'
testCompile group: 'org.quicktheories', name: 'quicktheories', version: '0.+'
testCompile group: 'org.springframework.boot', name: 'spring-boot-starter-test'
testCompile group: 'com.github.javafaker', name: 'javafaker', version: '0.+'
}
dependencyManagement {
imports { mavenBom 'org.springframework.data:spring-data-releasetrain:Ingalls-SR1' }
dependencies {
dependency group: 'org.springframework.data', name: 'spring-data-jpa', version: '1.11.+'
}
}
// flyway {
// url = 'jdbc:postgresql://127.0.0.1:26257/crud?sslmode=disable'
// user = 'root'
// }
task performJPAWeaving(type: JavaExec, dependsOn: "compileJava"){
inputs.dir compileJava.destinationDir
outputs.dir compileJava.destinationDir
main "org.eclipse.persistence.tools.weaving.jpa.StaticWeave"
args "-persistenceinfo",
"src/main/resources",
compileJava.destinationDir.getAbsolutePath(),
compileJava.destinationDir.getAbsolutePath()
classpath = configurations.compile
}
tasks.withType(JavaCompile){
doLast{
tasks.performJPAWeaving.execute()
}
}
// http://stackoverflow.com/questions/6431026/generating-jpa2-metamodel-from-a-gradle-build-script
// http://bsideup.blogspot.com/2015/04/querydsl-with-gradle-and-idea.html

1516
dependencies.lock Normal file

File diff suppressed because it is too large Load diff

7
ops/Makefile Normal file
View file

@ -0,0 +1,7 @@
up:
docker-compose -f riemann.yml up --force-recreate
down:
docker-compose -f riemann.yml down

6
ops/NOTES Normal file
View file

@ -0,0 +1,6 @@
$ docker run -d -p 4567:4567 travix/riemann-dash:latest
$ docker run -d -p 5555:5555 -p 5556:5556 -p 16384:16384 travix/riemann-server:latest
$ open http://127.0.0.1:4567/#Riemann
java -jar riemann-grid-0.6.4-standalone.jar -l 127.0.0.1 -p 9999 -H 127.0.0.1 -P 5555

14
ops/logback.xml Normal file
View file

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
<appender name="R1" class="pl.defunkt.logback.RiemannAppender">
<serviceName>Example CRUD Service</serviceName>
<riemannHostName>127.0.0.1</riemannHostName>
<riemannPort>5555</riemannPort>
<hostName>graphene.local</hostName>
<customAttributes>application:test-service,datacenter:us-sw</customAttributes>
<riemannLogLevel>WARN</riemannLogLevel>
</appender>
<root level="DEBUG">
<appender-ref ref="R1"/>
</root>
</configuration>

Binary file not shown.

32
ops/riemann.yml Normal file
View file

@ -0,0 +1,32 @@
version: "3"
networks:
riemann:
driver: overlay
services:
riemannserver:
container_name: riemann-server
image: "nathanleclaire/riemann-server:article"
network_mode: "riemann"
ports:
- "127.0.0.1:5556:5556"
restart: always
riemannhealth:
image: "nathanleclaire/riemann-health:article"
network_mode: "riemann"
pid: host
environment:
- "affinity:container!=*riemannhealth*"
volumes:
- "/etc/hostname:/etc/hostname:ro"
restart: always
riemanndash:
image: "nathanleclaire/riemann-dash:article"
networks:
- riemann
ports:
- "127.0.0.1:4567:4567"
restart: always

View file

@ -0,0 +1,147 @@
package com.example.crud;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.Banner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.MetricFilterAutoConfiguration;
import org.springframework.boot.actuate.autoconfigure.MetricRepositoryAutoConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.Cache;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCache;
import org.springframework.cache.support.SimpleCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.core.env.Environment;
import org.springframework.core.env.SimpleCommandLinePropertySource;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaDialect;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.zalando.logbook.Logbook;
import javax.inject.Inject;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static org.zalando.logbook.Conditions.*;
@ComponentScan
@EnableCaching
@EnableJpaRepositories("com.example.crud.db.dao")
@EnableAutoConfiguration(exclude = { MetricFilterAutoConfiguration.class, MetricRepositoryAutoConfiguration.class })
@EnableTransactionManagement
@SpringBootApplication
public class Application {
private static final Logger log = LoggerFactory.getLogger(Application.class);
@Inject private Environment env;
public static void main(String[] args) {
SpringApplication app = new SpringApplication(Application.class);
app.setBannerMode(Banner.Mode.OFF);
SimpleCommandLinePropertySource source = new SimpleCommandLinePropertySource(args);
addDefaultProfile(app, source);
logbookSetup();
Environment env = app.run(args).getEnvironment();
}
private static void logbookSetup() {
Logbook logbook = Logbook.builder()
.condition(exclude(
requestTo("/health"),
requestTo("/admin/**"),
contentType("application/octet-stream"),
header("X-Secret", Sets.newHashSet("1", "true")::contains)))
.build();
}
/**
* If no profile has been configured, set by default the "dev" profile.
*/
private static void addDefaultProfile(SpringApplication app, SimpleCommandLinePropertySource source) {
if (!source.containsProperty("spring.profiles.active") &&
!System.getenv().containsKey("SPRING_PROFILES_ACTIVE")) {
app.setAdditionalProfiles(Constants.SPRING_PROFILE_DEVELOPMENT);
}
}
/**
* EclipseLink JPA setup.
*/
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean ret = new LocalContainerEntityManagerFactoryBean();
ret.setDataSource(dataSource());
ret.setJpaVendorAdapter(jpaVendorAdapter());
ret.setJpaDialect(eclipseLinkJpaDialect());
ret.setJpaPropertyMap(jpaProperties());
ret.setPackagesToScan("com.example.crud.db");
return ret;
}
@Bean
public EclipseLinkJpaDialect eclipseLinkJpaDialect() {
return new EclipseLinkJpaDialect();
}
/*
* Set this property to disable LoadTimeWeaver (i.e. Dynamic Weaving) for EclipseLink.
* Otherwise, you'll get: Cannot apply class transformer without LoadTimeWeaver specified
*/
@Bean
public Map<String, String> jpaProperties() {
Map<String, String> props = new HashMap<>();
props.put("eclipselink.weaving", "static"); //TODO(gburd): enable
return props;
}
@Bean
public JpaVendorAdapter jpaVendorAdapter() {
EclipseLinkJpaVendorAdapter jpaVendorAdapter = new EclipseLinkJpaVendorAdapter();
jpaVendorAdapter.setDatabase(Database.POSTGRESQL);
jpaVendorAdapter.setGenerateDdl(true);
return jpaVendorAdapter;
}
@Bean
public DataSource dataSource() {
final DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("org.postgresql.Driver");
dataSource.setUrl("jdbc:postgresql://127.0.0.1:26257/crud");
dataSource.setUsername("root");
dataSource.setPassword("");
return dataSource;
}
@Bean
public CacheManager cacheManager() {
//TODO(gburd): is there an eclipselink cache manager? or caffeine? or...?
Cache cache = new ConcurrentMapCache("name");
SimpleCacheManager manager = new SimpleCacheManager();
manager.setCaches(Arrays.asList(cache));
return manager;
}
}

View file

@ -0,0 +1,21 @@
package com.example.crud;
/**
* Application constants.
*/
public final class Constants {
// Spring profile for development, production and "fast", see http://jhipster.github.io/profiles.html
public static final String SPRING_PROFILE_DEVELOPMENT = "dev";
public static final String SPRING_PROFILE_PRODUCTION = "prod";
public static final String SPRING_PROFILE_FAST = "fast";
// Spring profile used when deploying with Spring Cloud (used when deploying to CloudFoundry)
public static final String SPRING_PROFILE_CLOUD = "cloud";
// Spring profile used when deploying to Heroku
public static final String SPRING_PROFILE_HEROKU = "heroku";
public static final String SYSTEM_ACCOUNT = "system";
private Constants() {
}
}

View file

@ -0,0 +1,27 @@
package com.example.crud;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
@Configuration
@EnableTransactionManagement
public class TransactionManagersConfig {
@Autowired EntityManagerFactory emf;
@Autowired private DataSource dataSource;
@Bean(name = "transactionManager")
public PlatformTransactionManager transactionManager() {
JpaTransactionManager tm =
new JpaTransactionManager();
tm.setEntityManagerFactory(emf);
tm.setDataSource(dataSource);
return tm;
}
}

View file

@ -0,0 +1,132 @@
package com.example.crud.controllers;
import com.example.crud.db.annotations.Retry;
import com.example.crud.db.models.Employee;
import com.example.crud.services.EmployeeService;
import lombok.val;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import static org.springframework.web.bind.annotation.RequestMethod.*;
@Controller
@Transactional(propagation = Propagation.MANDATORY)
public class EmployeeController {
private static final Logger log = LoggerFactory.getLogger(EmployeeService.class);
@Autowired private EmployeeService employeeService;
@Transactional(readOnly = true, isolation = Isolation.REPEATABLE_READ)
@RequestMapping(value = "/api/v1/employees", method = GET, produces = "application/json")
public ResponseEntity<List<Employee>> index() {
log.debug("Getting all employees...");
@SuppressWarnings("unchecked") ResponseEntity response = ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
try {
List<Employee> employees = employeeService.findAll();
if (employees != null) {
response = ResponseEntity.status(HttpStatus.OK).body(employees);
} else {
response = ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
} catch (Exception e) {
response = ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
}
return response;
}
@Retry(times = 3, on = org.springframework.dao.OptimisticLockingFailureException.class)
@Transactional(propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
@RequestMapping(value = "/api/v1/employee", method = POST, consumes = "application/json")
public ResponseEntity<Long> addEmployee(@RequestBody Employee employee) {
log.debug("Inserting employee");
@SuppressWarnings("unchecked") ResponseEntity response = ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
try {
val e = employeeService.saveEmployee(employee);
if (e != null) {
response = ResponseEntity.status(HttpStatus.CREATED).body(e.getId());
} else {
response = ResponseEntity.status(HttpStatus.NOT_MODIFIED).build();
}
} catch (Exception e) {
response = ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
}
return response;
}
@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.SERIALIZABLE)
@RequestMapping(value = "/api/v1/employee/{id:[\\d]+}", method = DELETE)
public ResponseEntity<Long> deleteEmployee(@PathVariable Long id) {
log.debug("Deleting employee");
@SuppressWarnings("unchecked") ResponseEntity response = ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
try {
employeeService.delete(id);
response = ResponseEntity.status(HttpStatus.OK).body(id);
} catch (Exception e) {
response = ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
return response;
}
@Transactional(readOnly = true, isolation = Isolation.REPEATABLE_READ)
//@RequestMapping(value = "/api/v1/employee/{id:[\\d]+}", method = GET)
@RequestMapping(value = "/api/v1/employee/{id}", method = GET)
public ResponseEntity<Employee> find(@PathVariable Long id) {
log.debug("Getting a specific employee by id {}", id);
@SuppressWarnings("unchecked") ResponseEntity response = ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
try {
Employee employee = employeeService.getOne(id);
if (employee != null) {
response = ResponseEntity.status(HttpStatus.OK).body(employee);
} else {
response = ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
} catch (Exception e) {
response = ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
}
return response;
}
@Transactional(readOnly = true, isolation = Isolation.REPEATABLE_READ)
@RequestMapping(value = "/api/v1/employee/named", method = GET)
public ResponseEntity<List<Employee>> find(@RequestParam("last") String name) {
log.debug("Getting a specific employee by name: {}", name);
@SuppressWarnings("unchecked") ResponseEntity response = ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
try {
List<Employee> employees = employeeService.findByLastName(name);
if (employees != null) {
response = ResponseEntity.status(HttpStatus.OK).body(employees);
} else {
response = ResponseEntity.status(HttpStatus.NOT_FOUND).build();
}
} catch (Exception e) {
response = ResponseEntity.status(HttpStatus.BAD_REQUEST).build();
}
return response;
}
@Transactional(readOnly = true, isolation = Isolation.REPEATABLE_READ)
@RequestMapping(value = "/api/v1/employee/{id:[\\d]+}", method = DELETE)
public @ResponseBody Long delete(@PathVariable Long id) {
log.debug("Remove an employee by id {}", id);
employeeService.delete(id);
return id;
}
/*
@Transactional(readOnly = true, isolation = Isolation.REPEATABLE_READ)
@RequestMapping("/api/v1/employee/lastNameLength")
public List<Employee> fetchByLength(Long length) {
return employeeService.fetchByLastNameLength(length);
}
*/
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,52 @@
//http://stackoverflow.com/a/11038230/366692
package com.example.crud.crypto;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import java.security.SecureRandom;
import org.apache.commons.codec.binary.Base64;
public class Password {
// The higher the number of iterations the more expensive computing the hash is for us and also for an attacker.
private static final int iterations = 20*1000;
private static final int saltLen = 32;
private static final int desiredKeyLen = 256;
/**
* Computes a salted PBKDF2 hash of given plaintext password suitable for storing in a database.
* Empty passwords are not supported.
*/
public static String getSaltedHash(String password) throws Exception {
byte[] salt = SecureRandom.getInstance("SHA1PRNG").generateSeed(saltLen);
// combine the salt with the password
return Base64.encodeBase64String(salt) + "$" + hash(password, salt);
}
/**
* Checks whether given plaintext password corresponds to a stored salted hash of the password.
*/
public static boolean check(String password, String stored) throws Exception{
String[] saltAndPass = stored.split("\\$");
if (saltAndPass.length != 2) {
throw new IllegalStateException(
"The stored password have the form 'salt$hash'");
}
String hashOfInput = hash(password, Base64.decodeBase64(saltAndPass[0]));
return hashOfInput.equals(saltAndPass[1]);
}
/**
* NOTE: using PBKDF2 from Sun, an alternative is https://github.com/wg/scrypt
* cf. http://www.unlimitednovelty.com/2012/03/dont-use-bcrypt.html
*/
private static String hash(String password, byte[] salt) throws Exception {
if (password == null || password.length() == 0)
throw new IllegalArgumentException("Empty passwords are not supported.");
SecretKeyFactory f = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
SecretKey key = f.generateSecret(new PBEKeySpec(
password.toCharArray(), salt, iterations, desiredKeyLen)
);
return Base64.encodeBase64String(key.getEncoded());
}
}

View file

@ -0,0 +1,190 @@
package com.example.crud.crypto;
import java.security.SecureRandom;
import java.util.regex.Pattern;
public final class Randomness {
private static final int NUM_SEEDS = 16;
private static final SecureRandom SEED_PROVIDER = new SecureRandom();
private static final Pattern DB_OBJECT_ID_PATTERN = Pattern.compile("[0-9a-f]{24}", Pattern.CASE_INSENSITIVE);
private static final String BASE64_CHAR = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
private static final String OBJECT_ID_CHAR = "0123456789abcdef";
private static final int[] BASE64_NUM = new int[256]; //maps base64 characters to 0-63, others to 0.
static {
for(int i = 0; i < 64; ++i) {
BASE64_NUM[BASE64_CHAR.charAt(i)] = i;
}
}
private static final ThreadLocal<ReseedingSecureRandom> SECURE_RANDOM = new ThreadLocal<ReseedingSecureRandom>() {
@Override
protected ReseedingSecureRandom initialValue() {
return new ReseedingSecureRandom();
}
};
// Use the MersenneTwisterFast implementation for its performance and long period. The implementation is
// not thread-safe so we keep an instance in thread-local storage.
private static final ThreadLocal<MersenneTwisterFast> MERSENNE_TWISTER = new ThreadLocal<MersenneTwisterFast>() {
@Override
protected MersenneTwisterFast initialValue() {
int[] seedInts = new int[NUM_SEEDS];
for (int i = 0; i < NUM_SEEDS; i++) {
seedInts[i] = SEED_PROVIDER.nextInt();
}
return new MersenneTwisterFast(seedInts);
}
};
private Randomness() {}
/** Unsecure! */
public static MersenneTwisterFast getMersenneTwister() {
return MERSENNE_TWISTER.get();
}
public static int randomIntSecure(int n) {
if (n < 1) {
throw new IllegalArgumentException();
}
// NOTE: This is completely different from how java.util.Random#nextInt(int) does it
return Math.abs(SECURE_RANDOM.get().nextInt()) % n;
}
public static int[] randomIntsSecure(int count) {
int[] ints = new int[count];
for (int i = 0; i < ints.length; ++i) {
ints[i] = SECURE_RANDOM.get().nextInt();
}
return ints;
}
public static int[] randomIntsUnsecure(int count) {
MersenneTwisterFast mt = getMersenneTwister();
int[] ints = new int[count];
for (int i = 0; i < ints.length; ++i) {
ints[i] = mt.nextInt();
}
return ints;
}
public static byte[] randomBytesSecure(int size /* in bytes */) {
byte[] bytes = new byte[size];
SECURE_RANDOM.get().nextBytes(bytes);
return bytes;
}
public static byte[] randomBytesUnsecure(int size /* in bytes */) {
byte[] bytes = new byte[size];
getMersenneTwister().nextBytes(bytes);
return bytes;
}
public static String randomHexStringSecure(int lengthBytes) {
StringBuilder str = new StringBuilder();
ReseedingSecureRandom sr = SECURE_RANDOM.get();
for (int i = 0; i < lengthBytes; i++) {
str.append(Integer.toHexString(sr.nextInt(16)));
}
return str.toString();
}
public static boolean isValidDBObjectId(String stringId) {
return stringId != null && DB_OBJECT_ID_PATTERN.matcher(stringId).matches()
&& stringId.toLowerCase().equals(stringId);
}
public static String toValidDBObjectId(String stringId) {
if (isValidDBObjectId(stringId)) {
return stringId;
}
if (stringId.length() < 24) {
throw new AssertionError("Can't convert to valid DBObjectId " + stringId);
}
StringBuilder hexString = new StringBuilder();
for (int i = 0; i < 24; ++i) {
byte bt = (byte) stringId.charAt(i);
String hs = Integer.toHexString(bt & 0x0F).toLowerCase();
hexString.append(hs);
}
return hexString.toString();
}
public static String treeNodeId() {
return treeNodeId('\u0000');
}
public static String treeNodeId(char prefix) {
return treeNodeId(prefix, getMersenneTwister());
}
public static String treeNodeId(char prefix, MersenneTwisterFast twister) {
final int prefixLength = prefix == '\u0000' ? 0 : 1;
final int numChars = prefixLength + 16; // 16 * 6 is 96 bits of entropy
char[] chars = new char[numChars];
if (prefixLength == 1) {
chars[0] = prefix;
}
for (int i = 0; i < 16; ++i) {
chars[prefixLength + i] = BASE64_CHAR.charAt(twister.nextInt(64));
}
return new String(chars);
}
/**
 * Not secure! Returns {@code length} random ASCII letters — presumably the
 * first 52 entries of BASE64_CHAR are A-Z a-z (TODO confirm table layout).
 */
public static String alphaString(int length) {
    final MersenneTwisterFast twister = getMersenneTwister();
    final char[] out = new char[length];
    for (int i = 0; i < out.length; ++i) {
        out[i] = BASE64_CHAR.charAt(twister.nextInt(52));
    }
    return new String(out);
}
/**
 * Returns {@code length} random alphanumeric characters (first 62 entries of
 * BASE64_CHAR) drawn from the reseeding secure RNG.
 */
public static String alphaNumericStringSecure(int length) {
    final ReseedingSecureRandom rng = SECURE_RANDOM.get();
    final char[] out = new char[length];
    for (int i = 0; i < out.length; ++i) {
        out[i] = BASE64_CHAR.charAt(rng.nextInt(62));
    }
    return new String(out);
}
/** Returns a 24-char pseudo-random id over the OBJECT_ID_CHAR (hex) alphabet. Not secure. */
public static String elementId() {
    final MersenneTwisterFast twister = getMersenneTwister();
    final char[] out = new char[24];
    for (int i = 0; i < out.length; ++i) {
        out[i] = OBJECT_ID_CHAR.charAt(twister.nextInt(16));
    }
    return new String(out);
}
/** Returns {@code length} random alphanumeric characters. Not cryptographically secure. */
public static String alphaNumericString(int length) {
    final MersenneTwisterFast twister = getMersenneTwister();
    final char[] out = new char[length];
    for (int i = 0; i < out.length; ++i) {
        out[i] = BASE64_CHAR.charAt(twister.nextInt(62));
    }
    return new String(out);
}
/** Returns a feature id without the ordinal */
public static String featureId() {
    // "F" marker + 14 random alphanumerics (not cryptographically secure).
    return "F" + alphaNumericString(14);
}
/**
 * Maps a position to a base64 alphabet character, wrapping modulo 64.
 *
 * BUG FIX: uses Math.floorMod so negative positions wrap around instead of
 * producing a negative index ({@code position % 64} is negative for negative
 * input and threw StringIndexOutOfBoundsException).
 */
public static char toBase64Char(int position) {
    return BASE64_CHAR.charAt(Math.floorMod(position, 64));
}
/**
 * Inverse of toBase64Char: numeric value of a base64 character via BASE64_NUM.
 * NOTE(review): characters outside the table's index range throw
 * ArrayIndexOutOfBoundsException — confirm callers only pass base64 chars.
 */
public static int fromBase64Char(char character) {
    return BASE64_NUM[character];
}
}

View file

@ -0,0 +1,41 @@
package com.example.crud.crypto;
import java.security.SecureRandom;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A SecureRandom wrapper that swaps in a brand-new SecureRandom after roughly
 * {@code RESEED_AT} calls so long-lived processes keep fresh seed material.
 * The check is intentionally best-effort: racing callers may briefly use the
 * previous instance, which is harmless.
 */
final class ReseedingSecureRandom {
    /** Approximate number of calls between reseeds (not strict under concurrency). */
    private static final int RESEED_AT = 100000;
    /** Counts calls since the last reseed. */
    private final AtomicInteger calls = new AtomicInteger(0);
    /** Current delegate; volatile so a reseed is visible to all threads. */
    private volatile SecureRandom delegate;

    ReseedingSecureRandom() {
        delegate = new SecureRandom();
    }

    /** Fills {@code bytes} with random data. */
    void nextBytes(byte[] bytes) {
        current().nextBytes(bytes);
    }

    /** Returns a uniformly random int. */
    int nextInt() {
        return current().nextInt();
    }

    /** Returns a uniformly random int in [0, n). */
    int nextInt(int n) {
        return current().nextInt(n);
    }

    private SecureRandom current() {
        if ((calls.incrementAndGet() % RESEED_AT) == 0) {
            // Reset the counter; a racing caller may still observe the old
            // instance, which is acceptable.
            calls.set(0);
            delegate = new SecureRandom();
        }
        return delegate;
    }
}

View file

@ -0,0 +1,15 @@
package com.example.crud.db.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method whose invocation should be retried when one of the listed
 * exception types (or a subtype, anywhere in the cause chain) is thrown.
 * Interpreted by OptimisticConcurrencyControlAspect.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Retry {
    /** Exception types that trigger a retry; must name at least one. */
    Class<? extends Exception>[] on();
    /** Number of additional attempts after the first failure; must be > 0. */
    int times() default 1;
}

View file

@ -0,0 +1,81 @@
package com.example.crud.db.aspects;
import com.example.crud.db.annotations.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert;
import java.lang.reflect.Method;
import java.util.Arrays;
/**
 * Around-advice for {@link Retry}-annotated methods: re-invokes the join point
 * when a configured throwable (or any of its causes) is raised, up to
 * {@code @Retry.times()} additional attempts.
 */
@Aspect
public class OptimisticConcurrencyControlAspect {
    private static final Logger log = LoggerFactory.getLogger(OptimisticConcurrencyControlAspect.class);

    @Around("@annotation(com.example.crud.db.annotations.Retry)")
    public Object retry(ProceedingJoinPoint pjp) throws Throwable {
        Retry retryAnnotation = getRetryAnnotation(pjp);
        return (retryAnnotation != null) ? proceed(pjp, retryAnnotation) : proceed(pjp);
    }

    private Object proceed(ProceedingJoinPoint pjp) throws Throwable {
        return pjp.proceed();
    }

    private Object proceed(ProceedingJoinPoint pjp, Retry retryAnnotation) throws Throwable {
        int times = retryAnnotation.times();
        // Legal widening: Class<? extends Exception>[] is assignable to
        // Class<? extends Throwable>[] via array covariance.
        Class<? extends Throwable>[] retryOn = retryAnnotation.on();
        Assert.isTrue(times > 0, "@Retry{times} should be greater than 0!");
        Assert.isTrue(retryOn.length > 0, "@Retry{on} should have at least one Throwable!");
        log.info("Proceed with {} retries on {}", times, Arrays.toString(retryOn));
        return tryProceeding(pjp, times, retryOn);
    }

    /** Invokes the join point, recursing with a decremented budget on a retryable failure. */
    private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
        try {
            return proceed(pjp);
        } catch (Throwable throwable) {
            if (isRetryThrowable(throwable, retryOn) && times-- > 0) {
                log.info("Optimistic locking detected, {} remaining retries on {}", times, Arrays.toString(retryOn));
                return tryProceeding(pjp, times, retryOn);
            }
            throw throwable;
        }
    }

    /** True when the throwable, or anything in its cause chain, matches a retryable type. */
    private boolean isRetryThrowable(Throwable throwable, Class<? extends Throwable>[] retryOn) {
        Throwable[] causes = ExceptionUtils.getThrowables(throwable);
        for (Throwable cause : causes) {
            for (Class<? extends Throwable> retryThrowable : retryOn) {
                if (retryThrowable.isAssignableFrom(cause.getClass())) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Finds the @Retry annotation on the signature's method, falling back to the
     * concrete target class (the signature may resolve to an un-annotated
     * interface method).
     *
     * BUG FIX: the fallback now resolves by the declared parameter types instead
     * of the runtime argument classes — the old lookup NPE'd on null arguments
     * and failed for subtype or autoboxed/primitive arguments.
     */
    private Retry getRetryAnnotation(ProceedingJoinPoint pjp) throws NoSuchMethodException {
        MethodSignature signature = (MethodSignature) pjp.getSignature();
        Method method = signature.getMethod();
        Retry retryAnnotation = AnnotationUtils.findAnnotation(method, Retry.class);
        if (retryAnnotation != null) {
            return retryAnnotation;
        }
        method = pjp.getTarget().getClass().getMethod(signature.getName(), method.getParameterTypes());
        return AnnotationUtils.findAnnotation(method, Retry.class);
    }
}

View file

@ -0,0 +1,94 @@
package com.example.crud.db.dao;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
import org.springframework.data.repository.PagingAndSortingRepository;
import org.springframework.data.repository.query.QueryByExampleExecutor;
import java.io.Serializable;
import java.util.List;
/**
 * Shared base repository contract for all entities. Marked @NoRepositoryBean so
 * Spring Data does not instantiate it directly.
 */
@NoRepositoryBean
public interface BaseRepository<T, ID extends Serializable> extends JpaRepository<T, ID>,
        PagingAndSortingRepository<T, ID>, QueryByExampleExecutor<T> { //TODO(gburd): this requires gradle to pre-build classes like '@Employee' etc. }, QueryDslPredicateExecutor<T> {
    /**
     * Saves a given entity. Use the returned instance for further operations as the save operation might have changed the
     * entity instance completely.
     *
     * @param entity the entity to persist
     * @return the saved entity
     */
    <S extends T> S save(S entity);

    /**
     * Saves all given models.
     *
     * @param entities the entities to persist
     * @return the saved models
     * @throws IllegalArgumentException in case the given entity is {@literal null}.
     */
    <S extends T> List<S> save(Iterable<S> entities);

    /**
     * Retrieves an entity by its id.
     *
     * @param id must not be {@literal null}.
     * @return the entity with the given id or {@literal null} if none found
     * @throws IllegalArgumentException if {@code id} is {@literal null}
     */
    T findOne(ID id);

    /**
     * Returns all instances of the type.
     *
     * @return all models
     */
    List<T> findAll();

    /**
     * Returns all instances of the type with the given IDs.
     *
     * @param ids the identifiers to look up; must not be {@literal null}.
     * @return the entities found (missing ids are silently skipped)
     */
    List<T> findAll(Iterable<ID> ids);

    /**
     * Returns whether an entity with the given id exists.
     *
     * @param id must not be {@literal null}.
     * @return true if an entity with the given id exists, {@literal false} otherwise
     * @throws IllegalArgumentException if {@code id} is {@literal null}
     */
    boolean exists(ID id);

    /**
     * Returns the number of models available.
     *
     * @return the number of models
     */
    long count();

    /**
     * Deletes the entity with the given id.
     *
     * @param id must not be {@literal null}.
     * @throws IllegalArgumentException in case the given id is {@literal null}.
     */
    // Optional<T>...
    void delete(ID id);

    /**
     * Deletes the given models.
     *
     * @throws IllegalArgumentException in case the given {@link Iterable} is {@literal null}.
     */
    void delete(Iterable<? extends T> entities);

    /**
     * Deletes all models managed by the models.
     */
    void deleteAll();

    /**
     * Flush cache.
     */
    default void refresh() {} //TODO(gburd): implement cache flush... ?
}

View file

@ -0,0 +1,18 @@
package com.example.crud.db.dao;
import com.example.crud.db.models.Employee;
import com.example.crud.db.models.Gender;
import org.springframework.data.rest.core.annotation.RepositoryRestResource;
import org.springframework.stereotype.Component;
import java.util.List;
/** Spring Data repository for Employee entities, exported as a REST resource. */
@Component
@RepositoryRestResource
public interface EmployeeRepository extends BaseRepository<Employee, Long> {
    /** Employees whose last name matches exactly. */
    List<Employee> findByLastName(String lastName);
    /** Employees of the given gender. */
    List<Employee> findByGender(Gender gender);
    /** Employees strictly younger than {@code age}. */
    List<Employee> findByAgeLessThan(int age);
    /** Employees whose age falls in the Spring Data {@code Between} range of age1..age2. */
    List<Employee> findByAgeBetween(int age1, int age2);
    // List<Employee> fetchByLastNameLength(@Param("length") Long length);
}

View file

@ -0,0 +1,53 @@
package com.example.crud.db.models;
import com.example.crud.util.SerialVersionUID;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import org.eclipse.persistence.annotations.*;
import org.eclipse.persistence.descriptors.changetracking.ChangeTracker;
import org.eclipse.persistence.sessions.Session;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import javax.persistence.*;
import java.beans.PropertyChangeListener;
import java.io.Serializable;
import java.util.Date;
import static org.eclipse.persistence.annotations.ChangeTrackingType.AUTO;
/**
 * Base class for all persisted models: Spring Data audit timestamps plus an
 * optimistic-lock @Version column. As a @MappedSuperclass its columns are
 * inlined into each subclass's table.
 */
@EqualsAndHashCode
@ChangeTracking(AUTO) // uses the persistence property change listeners
/* NOTE: @Cache is ignored on mapped superclasses (@MappedSuperclass), this is here purely for reference sake.
@Cache(type = CacheType.SOFT_WEAK // let the GC remove cached data under memory pressure
 ,expiryTimeOfDay=@TimeOfDay(hour=1) // cached data will expire after 1hr even if there is enough memory to keep the data around longer
 ,disableHits=false // use the cache (when true the cache is only used for identity)
 ,isolation=ISOLATED // cache within UnitOfWork (db/txn), or ".SHARED" for Session scope
 ,alwaysRefresh=true // update cache on reads...
 ,refreshOnlyIfNewer=true // only when read value is newer than one in cache
 ,databaseChangeNotificationType=INVALIDATE // when DCN is enabled, invalidate cached objects when DB says they've mutated
 ,coordinationType=SEND_NEW_OBJECTS_WITH_CHANGES) //.INVALIDATE_CHANGED_OBJECTS */
@MappedSuperclass
public abstract class AbstractModel<T extends Serializable> implements Model<T>, ChangeTracker {
    private static final long serialVersionUID = SerialVersionUID.compute(AbstractModel.class);

    // Instance variables *not* mapped to database fields must be marked as @Transient
    @Transient private PropertyChangeListener listener;

    // Audit columns populated by Spring Data auditing annotations.
    @Getter @CreatedDate @Temporal(TemporalType.TIMESTAMP) @Column(name="CREATED") Date createdDate;
    @Getter @LastModifiedDate @Temporal(TemporalType.TIMESTAMP) @Column(name="MODIFIED") Date modifiedDate;

    // OPTIMISTIC CONCURRENCY CONTROL
    @Version @Column(name="VERSION") private long version;

    // TODO(gburd): what must these two methods do?
    /** EclipseLink ChangeTracker hook: returns the installed property-change listener, if any. */
    public PropertyChangeListener _persistence_getPropertyChangeListener() {
        return listener;
    }

    /** EclipseLink ChangeTracker hook: installs the persistence property-change listener. */
    public void _persistence_setPropertyChangeListener(PropertyChangeListener listener) {
        this.listener = listener;
    }
}

View file

@ -0,0 +1,55 @@
package com.example.crud.db.models;
import com.example.crud.util.SerialVersionUID;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.eclipse.persistence.annotations.Cache;
import org.eclipse.persistence.annotations.CacheType;
import org.eclipse.persistence.annotations.TimeOfDay;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import javax.persistence.*;
import static javax.persistence.FetchType.LAZY;
import static org.eclipse.persistence.annotations.CacheCoordinationType.SEND_NEW_OBJECTS_WITH_CHANGES;
import static org.eclipse.persistence.annotations.DatabaseChangeNotificationType.INVALIDATE;
import static org.eclipse.persistence.config.CacheIsolationType.ISOLATED;
/**
 * Postal address entity, flake-sequenced primary key, soft-weak L2 cache.
 * NOTE(review): @EnableJpaAuditing is a @Configuration-level annotation —
 * placing it on an entity looks unintentional; confirm and move to a config class.
 */
@EnableJpaAuditing
@EntityListeners(AuditingEntityListener.class)
@EqualsAndHashCode(callSuper=true)
@Cache(type = CacheType.SOFT_WEAK         // let the GC evict under memory pressure
    ,expiryTimeOfDay=@TimeOfDay(hour=1)   // daily expiry regardless of memory
    ,disableHits=false
    ,isolation=ISOLATED
    ,alwaysRefresh=true
    ,refreshOnlyIfNewer=true
    ,databaseChangeNotificationType=INVALIDATE
    ,coordinationType=SEND_NEW_OBJECTS_WITH_CHANGES)
@Table(name = "ADDRESS")
public @Data @Entity class Address extends AbstractModel<Long> {
    private static final long serialVersionUID = SerialVersionUID.compute(Address.class);

    // PRIMARY KEY
    @Id @Column @GeneratedValue(generator = "flake-seq")
    private Long id;

    // FIELDS
    private String city;
    private String country;
    @Basic(fetch=LAZY) private String province; // rarely needed, loaded on demand
    private String postalCode;
    private String street;

    /** JPA requires a no-arg constructor. */
    public Address() {
    }

    /** Convenience constructor populating every field. */
    public Address(String city, String country, String province, String postalCode, String street) {
        this.city = city;
        this.country = country;
        this.province = province;
        this.postalCode = postalCode;
        this.street = street;
    }
}

View file

@ -0,0 +1,148 @@
package com.example.crud.db.models;
import com.example.crud.util.SerialVersionUID;
import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.eclipse.persistence.annotations.*;
import org.eclipse.persistence.annotations.Cache;
import org.eclipse.persistence.config.HintValues;
import org.eclipse.persistence.config.QueryHints;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import javax.persistence.*;
import javax.persistence.CollectionTable;
import javax.persistence.Convert;
import javax.persistence.Index;
import javax.swing.*;
import javax.validation.constraints.*;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import static javax.persistence.TemporalType.DATE;
import static org.eclipse.persistence.annotations.CacheCoordinationType.SEND_NEW_OBJECTS_WITH_CHANGES;
import static org.eclipse.persistence.annotations.DatabaseChangeNotificationType.INVALIDATE;
import static org.eclipse.persistence.config.CacheIsolationType.ISOLATED;
/**
 * Employee entity: audited, optimistically versioned (via AbstractModel), with
 * salary split into a secondary table and a soft-weak shared cache.
 */
@EqualsAndHashCode(callSuper=true)
@EnableJpaAuditing
@EntityListeners(AuditingEntityListener.class)
@SecondaryTable(name = "SALARY")
@NamedQueries({
    @NamedQuery(name = "Employee.findAll", query = "SELECT e FROM Employee e ORDER BY e.id"),
    @NamedQuery(name = "Employee.findByName", query = "SELECT e FROM Employee e WHERE e.firstName LIKE :firstName AND e.lastName LIKE :lastName"),
    @NamedQuery(name = "Employee.count", query = "SELECT COUNT(e) FROM Employee e"),
    @NamedQuery(name = "Employee.countByName", query = "SELECT COUNT(e) FROM Employee e WHERE e.firstName LIKE :firstName AND e.lastName LIKE :lastName"),
    // Query used in {@link IdInPaging}
    @NamedQuery(name = "Employee.idsIn", query = "SELECT e FROM Employee e WHERE e.id IN :IDS ORDER BY e.id",
        hints = { @QueryHint(name = QueryHints.QUERY_RESULTS_CACHE, value = HintValues.TRUE) }) })
@Cache(type = CacheType.SOFT_WEAK
    ,expiryTimeOfDay=@TimeOfDay(hour=1)
    ,disableHits=false
    ,isolation=ISOLATED
    ,alwaysRefresh=true
    ,refreshOnlyIfNewer=true
    ,databaseChangeNotificationType=INVALIDATE
    ,coordinationType=SEND_NEW_OBJECTS_WITH_CHANGES)
@Table(name = "EMPLOYEE",
    indexes={
        @Index(name="EMP_SSN_INDEX", unique=true, columnList="SSN"),
        @Index(name="EMP_EMAIL_INDEX", columnList="EMAIL"),
        @Index(name="EMP_F_NAME_INDEX", columnList="FIRST_NAME"),
        @Index(name="EMP_L_NAME_INDEX", columnList="LAST_NAME"),
        // BUG FIX: columnList entries must be comma-separated; the previous
        // "FIRST_NAME LAST_NAME" parsed LAST_NAME as an invalid ordering token.
        @Index(name="EMP_NAME_INDEX", columnList="FIRST_NAME, LAST_NAME") })
public @Data @Entity class Employee extends AbstractModel<Long> {
    // Consistency fix: compute the UID from Employee (siblings use their own class),
    // not AbstractModel.
    private static final long serialVersionUID = SerialVersionUID.compute(Employee.class);

    // PRIMARY KEY
    @Id @Column @GeneratedValue(generator = "flake-seq")
    private Long id;

    // FIELDS
    @Column(name="SSN") @CacheIndex @NotNull private String socialSecurityNumber;
    @Column private String honorific;
    @Column(name="FIRST_NAME") private String firstName;
    @Column(name="LAST_NAME") private String lastName;
    @Column private String suffix;
    @Column(name="EMAIL") private String emailAddress;
    @Column @Past @Temporal(DATE) private Calendar birthdate;
    @Column @Digits(integer=3, fraction=0) @NotNull @Min(0) @Max(125) private int age = 0;
    @Basic(fetch=FetchType.LAZY) @Lob private ImageIcon picture;
    /* NOTE: Gender mapped using Basic with an ObjectTypeConverter to map between
     * single char code value in database to enum. JPA only supports mapping to
     * the full name of the enum or its ordinal value. */
    @Basic @Column(name = "GENDER") @Convert(converter = GenderConverter.class) private Gender gender = Gender.Male;

    // Stored in the SALARY secondary table.
    @Column(table = "SALARY")
    private double salary;

    // RELATIONS
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "MANAGER_ID")
    @JsonBackReference
    private Employee manager;

    @OneToMany(mappedBy = "manager")
    private List<Employee> managedEmployees = new ArrayList<>();

    @OneToMany(mappedBy = "owner", cascade = CascadeType.ALL, orphanRemoval = true)
    @PrivateOwned
    @JsonManagedReference
    private List<PhoneNumber> phoneNumbers = new ArrayList<>();

    @OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
    @JoinColumn(name = "ADDRESS_ID")
    private Address address;

    @Embedded
    @AttributeOverrides({ @AttributeOverride(name = "startDate", column = @Column(name = "START_DATE")),
        @AttributeOverride(name = "endDate", column = @Column(name = "END_DATE")) })
    private EmploymentPeriod period;

    @ElementCollection
    @CollectionTable(name = "RESPONS")
    private List<String> responsibilities = new ArrayList<>();

    /** Adds a direct report and sets its back-reference; returns the report. */
    public Employee addManagedEmployee(Employee employee) {
        getManagedEmployees().add(employee);
        employee.setManager(this);
        return employee;
    }

    /** Removes a direct report and clears its back-reference; returns the report. */
    public Employee removeManagedEmployee(Employee employee) {
        getManagedEmployees().remove(employee);
        employee.setManager(null);
        return employee;
    }

    /** Adds a phone number and sets its owner back-reference. */
    public PhoneNumber addPhoneNumber(PhoneNumber phoneNumber) {
        getPhoneNumbers().add(phoneNumber);
        phoneNumber.setOwner(this);
        return phoneNumber;
    }

    /** Creates and adds a phone number of the given type. */
    public PhoneNumber addPhoneNumber(String type, String number) {
        PhoneNumber phoneNumber = new PhoneNumber(type, number);
        return addPhoneNumber(phoneNumber);
    }

    /** Removes a phone number and clears its owner back-reference. */
    public PhoneNumber removePhoneNumber(PhoneNumber phoneNumber) {
        getPhoneNumbers().remove(phoneNumber);
        phoneNumber.setOwner(null);
        return phoneNumber;
    }

    public void addResponsibility(String responsibility) {
        getResponsibilities().add(responsibility);
    }

    public void removeResponsibility(String responsibility) {
        getResponsibilities().remove(responsibility);
    }
}

View file

@ -0,0 +1,29 @@
package com.example.crud.db.models;
import lombok.Data;
import lombok.EqualsAndHashCode;
import static javax.persistence.TemporalType.TIMESTAMP;
import java.util.Calendar;
import javax.persistence.Embeddable;
import javax.persistence.Temporal;
/**
 * Represents the period of time an employee has worked for the company. A null
 * endDate indicates that the employee is current.
 */
public @Data @Embeddable class EmploymentPeriod {
    @Temporal(TIMESTAMP) private Calendar startDate;
    @Temporal(TIMESTAMP) private Calendar endDate;

    /**
     * Sets the start date's year/month/day, lazily creating the Calendar.
     * BUG FIX: previously NPE'd when startDate had never been assigned.
     */
    public void setStartDate(int year, int month, int date) {
        if (getStartDate() == null) {
            setStartDate(Calendar.getInstance());
        }
        getStartDate().set(year, month, date);
    }

    /**
     * Sets the end date's year/month/day, lazily creating the Calendar.
     * BUG FIX: previously NPE'd when endDate had never been assigned.
     */
    public void setEndDate(int year, int month, int date) {
        if (getEndDate() == null) {
            setEndDate(Calendar.getInstance());
        }
        getEndDate().set(year, month, date);
    }
}

View file

@ -0,0 +1,5 @@
package com.example.crud.db.models;
/** Employee gender; persisted as a one-char code by a JPA attribute converter. */
public enum Gender {
    Female,
    Male
}

View file

@ -0,0 +1,36 @@
package com.example.crud.db.models;
import javax.persistence.AttributeConverter;
import javax.persistence.Converter;
/**
 * Maps {@link Gender} to its single-char database code ("M"/"F") and back.
 *
 * BUG FIX: both directions are now null-safe — the JPA AttributeConverter
 * contract passes null for NULL columns/attributes, and switching on null threw
 * NullPointerException.
 */
@Converter(autoApply = true)
public class GenderConverter implements AttributeConverter<Gender, String> {
    @Override
    public String convertToDatabaseColumn(Gender gender) {
        if (gender == null) {
            return null;
        }
        switch (gender) {
            case Male:
                return "M";
            case Female:
                return "F";
            default:
                throw new IllegalArgumentException("Invalid gender: " + gender);
        }
    }

    @Override
    public Gender convertToEntityAttribute(String gender) {
        if (gender == null) {
            return null;
        }
        switch (gender) {
            case "M":
                return Gender.Male;
            case "F":
                return Gender.Female;
            default:
                throw new IllegalArgumentException("Invalid gender code: " + gender);
        }
    }
}

View file

@ -0,0 +1,54 @@
package com.example.crud.db.models;
import com.example.crud.crypto.Password;
import com.example.crud.util.SerialVersionUID;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.eclipse.persistence.annotations.Cache;
import org.eclipse.persistence.annotations.CacheType;
import org.eclipse.persistence.annotations.TimeOfDay;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import javax.persistence.*;
import static org.eclipse.persistence.annotations.CacheCoordinationType.SEND_NEW_OBJECTS_WITH_CHANGES;
import static org.eclipse.persistence.annotations.DatabaseChangeNotificationType.INVALIDATE;
import static org.eclipse.persistence.config.CacheIsolationType.ISOLATED;
/**
 * Login credentials entity; stores only the salted hash of the password.
 */
@EnableJpaAuditing
@EntityListeners(AuditingEntityListener.class)
@EqualsAndHashCode(callSuper=true)
@Cache(type = CacheType.SOFT_WEAK
    ,expiryTimeOfDay=@TimeOfDay(hour=1)
    ,disableHits=false
    ,isolation=ISOLATED
    ,alwaysRefresh=true
    ,refreshOnlyIfNewer=true
    ,databaseChangeNotificationType=INVALIDATE
    ,coordinationType=SEND_NEW_OBJECTS_WITH_CHANGES)
public @Data @Entity class Login extends AbstractModel<Long> {
    // Consistency fix: UID computed from Login, not AbstractModel (matches siblings).
    private static final long serialVersionUID = SerialVersionUID.compute(Login.class);

    // PRIMARY KEY
    @Id @Column @GeneratedValue(generator = "flake-seq")
    private Long id;

    // FIELDS
    private String username;
    private String password; // salted hash, never the plaintext

    /**
     * Hashes and stores the given plaintext password.
     *
     * BUG FIX: the hash was previously assigned to the method parameter instead
     * of the field, so the password was never actually set; hashing failures
     * were also swallowed via printStackTrace, leaving the entity in a broken
     * state — they now surface as IllegalStateException.
     */
    public void setPassword(String password) {
        try {
            this.password = Password.getSaltedHash(password);
        } catch (Exception e) {
            throw new IllegalStateException("Unable to hash password", e);
        }
    }

    /** Returns true when the plaintext matches the stored salted hash; false on any error. */
    public boolean check(String password) {
        try {
            return Password.check(password, this.password);
        } catch (Exception e) {
            return false;
        }
    }
}

View file

@ -0,0 +1,9 @@
package com.example.crud.db.models;
import java.io.Serializable;
import java.util.Date;
/**
 * Minimal contract for persisted models: audit timestamps. {@code T} is the
 * Serializable id type, though no id accessor is declared here.
 */
public interface Model<T extends Serializable> {
    /** When the row was created (maintained by auditing). */
    Date getCreatedDate();
    /** When the row was last modified (maintained by auditing). */
    Date getModifiedDate();
}

View file

@ -0,0 +1,75 @@
package com.example.crud.db.models;
import com.example.crud.util.SerialVersionUID;
import com.fasterxml.jackson.annotation.JsonBackReference;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import org.eclipse.persistence.annotations.Cache;
import org.eclipse.persistence.annotations.CacheType;
import org.eclipse.persistence.annotations.TimeOfDay;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import java.io.Serializable;
import javax.persistence.*;
import static org.eclipse.persistence.annotations.CacheCoordinationType.SEND_NEW_OBJECTS_WITH_CHANGES;
import static org.eclipse.persistence.annotations.DatabaseChangeNotificationType.INVALIDATE;
import static org.eclipse.persistence.config.CacheIsolationType.ISOLATED;
/**
 * Phone number entity with a compound primary key (owning employee id + type).
 * owner is excluded from toString/equals to avoid cycles through Employee.
 */
@ToString(exclude="owner") @EqualsAndHashCode(exclude="owner", callSuper=true)
@EnableJpaAuditing
@EntityListeners(AuditingEntityListener.class)
@Table(name = "PHONE")
@IdClass(PhoneNumber.ID.class)
@Cache(type = CacheType.SOFT_WEAK
    ,expiryTimeOfDay=@TimeOfDay(hour=1)
    ,disableHits=false
    ,isolation=ISOLATED
    ,alwaysRefresh=true
    ,refreshOnlyIfNewer=true
    ,databaseChangeNotificationType=INVALIDATE
    ,coordinationType=SEND_NEW_OBJECTS_WITH_CHANGES)
public @Data @Entity class PhoneNumber extends AbstractModel<Long> {
    private static final long serialVersionUID = SerialVersionUID.compute(PhoneNumber.class);

    // Read-only mapping of EMP_ID; the column is written through the owner join column below.
    @Id @Column(name = "EMP_ID", updatable = false, insertable = false)
    private Long id;

    @Id @Column(updatable = false)
    private String type;

    private String number;

    @ManyToOne @JoinColumn(name = "EMP_ID")
    @JsonBackReference
    private Employee owner;

    /** JPA requires a no-arg constructor. */
    public PhoneNumber() {
    }

    public PhoneNumber(String type, String number) {
        this();
        setType(type);
        setNumber(number);
    }

    /** Package-side back-reference setter; keeps the id half of the PK in sync with the owner. */
    protected void setOwner(Employee employee) {
        this.owner = employee;
        if (employee != null) {
            this.id = employee.getId();
        }
    }

    /**
     * Inner-class to manage the compound primary key for phone numbers.
     */
    public @Data static class ID implements Serializable {
        private static final long serialVersionUID = SerialVersionUID.compute(PhoneNumber.ID.class);
        public Long id;
        public String type;
    }
}

View file

@ -0,0 +1,39 @@
package com.example.crud.db.monitoring;
import org.eclipse.persistence.internal.sessions.IdentityMapAccessor;
import org.eclipse.persistence.jpa.JpaEntityManager;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import org.eclipse.persistence.sessions.Session;
import org.eclipse.persistence.sessions.server.ServerSession;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
/**
 * Helpers for inspecting EclipseLink's L1 (unit-of-work) and L2 (shared session)
 * caches.
 * NOTE(review): all EntityManager/session plumbing runs at field-initialization
 * time and is never closed — consider lazy or injected wiring.
 */
public class EclipseLinkCache {
    EntityManagerFactory emf = Persistence.createEntityManagerFactory("default");
    EntityManager em = emf.createEntityManager();
    Session session = em.unwrap(Session.class);
    JpaEntityManager jem = em.unwrap(JpaEntityManager.class);
    UnitOfWorkImpl ouw = jem.unwrap(UnitOfWorkImpl.class);
    ServerSession ss = jem.unwrap(ServerSession.class);
    IdentityMapAccessor ima = (IdentityMapAccessor) ss.getIdentityMapAccessor();

    /**
     * Number of instances of exactly {@code clazz} registered in the L1
     * (unit-of-work) clone map. (Raw Class parameter replaced with Class&lt;?&gt;
     * — backward compatible for all callers.)
     */
    public long countCachedEntitiesL1(Class<?> clazz) {
        return ouw.getCloneMapping().keySet().stream()
                .filter(entity -> entity.getClass().equals(clazz))
                .count();
    }

    /**
     * Number of entries in the L2 (shared) identity map for {@code clazz}.
     */
    public int countCachedEntitiesL2(Class<?> clazz) {
        return ima.getIdentityMap(clazz).getSize();
    }
}

View file

@ -0,0 +1,118 @@
package com.example.crud.db.monitoring;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
* A set of gauges for operating system settings.
*/
/**
 * A set of gauges backed by an {@code EclipseLinkMXBean}, read reflectively so
 * that attributes missing from the bean simply report 0 / NaN.
 *
 * BUG FIX(review): the previous draft did not compile — it assigned undeclared
 * fields ({@code mxBean}, {@code committedVirtualMemorySize}, ...), declared
 * five level1/level2 cache fields it never used, and called a non-existent
 * {@code ManagementFactory.getEclipseLinkMXBean()}. The bean and the
 * reflected-method fields actually used are now declared explicitly.
 */
public class EclipseLinkGaugeSet implements MetricSet {
    /** Bean the gauges read from; may be null until real wiring exists (gauges then report 0/NaN). */
    private final EclipseLinkMXBean mxBean;

    private final Optional<Method> committedVirtualMemorySize;
    private final Optional<Method> totalSwapSpaceSize;
    private final Optional<Method> freeSwapSpaceSize;
    private final Optional<Method> processCpuTime;
    private final Optional<Method> freePhysicalMemorySize;
    private final Optional<Method> totalPhysicalMemorySize;
    private final Optional<Method> openFileDescriptorCount;
    private final Optional<Method> maxFileDescriptorCount;
    private final Optional<Method> systemCpuLoad;
    private final Optional<Method> processCpuLoad;

    /**
     * Creates gauges with no bean wired.
     * TODO(review): {@code ManagementFactory} has no EclipseLink accessor —
     * obtain the bean from the application and use the other constructor.
     */
    public EclipseLinkGaugeSet() {
        this(null);
    }

    /**
     * Creates new gauges using the given bean.
     *
     * @param mxBean an {@link EclipseLinkMXBean}, or null for no-op gauges
     */
    public EclipseLinkGaugeSet(EclipseLinkMXBean mxBean) {
        this.mxBean = mxBean;
        committedVirtualMemorySize = getMethod("getCommittedVirtualMemorySize");
        totalSwapSpaceSize = getMethod("getTotalSwapSpaceSize");
        freeSwapSpaceSize = getMethod("getFreeSwapSpaceSize");
        processCpuTime = getMethod("getProcessCpuTime");
        freePhysicalMemorySize = getMethod("getFreePhysicalMemorySize");
        totalPhysicalMemorySize = getMethod("getTotalPhysicalMemorySize");
        openFileDescriptorCount = getMethod("getOpenFileDescriptorCount");
        maxFileDescriptorCount = getMethod("getMaxFileDescriptorCount");
        systemCpuLoad = getMethod("getSystemCpuLoad");
        processCpuLoad = getMethod("getProcessCpuLoad");
    }

    @Override
    public Map<String, Metric> getMetrics() {
        final Map<String, Metric> gauges = new HashMap<>();
        gauges.put("committedVirtualMemorySize", (Gauge<Long>) () -> invokeLong(committedVirtualMemorySize));
        gauges.put("totalSwapSpaceSize", (Gauge<Long>) () -> invokeLong(totalSwapSpaceSize));
        gauges.put("freeSwapSpaceSize", (Gauge<Long>) () -> invokeLong(freeSwapSpaceSize));
        gauges.put("processCpuTime", (Gauge<Long>) () -> invokeLong(processCpuTime));
        gauges.put("freePhysicalMemorySize", (Gauge<Long>) () -> invokeLong(freePhysicalMemorySize));
        gauges.put("totalPhysicalMemorySize", (Gauge<Long>) () -> invokeLong(totalPhysicalMemorySize));
        gauges.put("fd.usage", (Gauge<Double>) () -> invokeRatio(openFileDescriptorCount, maxFileDescriptorCount));
        gauges.put("systemCpuLoad", (Gauge<Double>) () -> invokeDouble(systemCpuLoad));
        gauges.put("processCpuLoad", (Gauge<Double>) () -> invokeDouble(processCpuLoad));
        return gauges;
    }

    /** Looks up a zero-arg accessor on the bean; empty when absent or no bean is wired. */
    private Optional<Method> getMethod(String name) {
        if (mxBean == null) {
            return Optional.empty();
        }
        try {
            final Method method = mxBean.getClass().getDeclaredMethod(name);
            method.setAccessible(true);
            return Optional.of(method);
        } catch (NoSuchMethodException e) {
            return Optional.empty();
        }
    }

    /** Invokes a long-returning accessor; 0 when absent or failing. */
    private long invokeLong(Optional<Method> method) {
        if (method.isPresent()) {
            try {
                return (long) method.get().invoke(mxBean);
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return 0L;
            }
        }
        return 0L;
    }

    /** Invokes a double-returning accessor; 0.0 when absent or failing. */
    private double invokeDouble(Optional<Method> method) {
        if (method.isPresent()) {
            try {
                return (double) method.get().invoke(mxBean);
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return 0.0;
            }
        }
        return 0.0;
    }

    /** Ratio of two long accessors; NaN when either is absent, failing, or the denominator is 0. */
    private double invokeRatio(Optional<Method> numeratorMethod, Optional<Method> denominatorMethod) {
        if (numeratorMethod.isPresent() && denominatorMethod.isPresent()) {
            try {
                long numerator = (long) numeratorMethod.get().invoke(mxBean);
                long denominator = (long) denominatorMethod.get().invoke(mxBean);
                if (0 == denominator) {
                    return Double.NaN;
                }
                return 1.0 * numerator / denominator;
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return Double.NaN;
            }
        }
        return Double.NaN;
    }
}

View file

@ -0,0 +1,77 @@
package com.example.crud.db.sequence;
import com.example.crud.crypto.Randomness;
import com.github.rholder.fauxflake.DefaultIdGenerator;
import com.github.rholder.fauxflake.api.IdGenerator;
import com.github.rholder.fauxflake.provider.SystemTimeProvider;
import com.github.rholder.fauxflake.provider.twitter.SnowflakeEncodingProvider;
import org.eclipse.persistence.internal.databaseaccess.Accessor;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.sequencing.Sequence;
import java.util.Vector;
/**
 * FlakeSequence is a decentralized, k-ordered unique ID generator that produces 64bit integers (Long).
 *
 * This is configured to mimic Twitter's Snowflake pattern. "k-ordered" means that they sort in timestamp
 * order based on when they were created within the level of precision available and of course with all the
 * standard NTP-based cavaets.
 */
public class FlakeSequence extends Sequence {
    IdGenerator idGenerator;

    public FlakeSequence(String name) {
        super(name);
        // Machine id in [0, 1024). TODO(gburd): use a hash of the hostname?
        long mid = Randomness.randomIntSecure(1024);
        idGenerator = new DefaultIdGenerator(new SystemTimeProvider(), new SnowflakeEncodingProvider(mid));
    }

    /** Generates the next id, retrying indefinitely until one is produced within 10ms. */
    @Override
    public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) {
        while (true) {
            try {
                return idGenerator.generateId(10).asLong();
            }
            catch (InterruptedException e) {
                // We waited more than 10ms to generate an Id, try again. This could be due to NTP
                // drift, leap seconds, GC pause, who knows. (Deliberately not re-interrupting:
                // the loop's purpose is to retry until a value is produced.)
            }
        }
    }

    /** Bulk generation unsupported; never called since shouldUsePreallocation() is false. */
    @Override
    public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) {
        return null;
    }

    @Override
    public void onConnect() {
    }

    @Override
    public void onDisconnect() {
    }

    @Override
    public boolean shouldAcquireValueAfterInsert() {
        return false;
    }

    /**
     * BUG FIX: ids from this sequence are Longs, so the previous unconditional
     * cast to String threw ClassCastException. Override only when no usable
     * value is already present.
     */
    public boolean shouldAlwaysOverrideExistingValue(String seqName, Object existingValue) {
        return existingValue == null
            || (existingValue instanceof String && ((String) existingValue).isEmpty());
    }

    @Override
    public boolean shouldUseTransaction() {
        return false;
    }

    @Override
    public boolean shouldUsePreallocation() {
        // NOTE: never pre-allocate, that would defeat the time-ordered nature of these IDs
        return false;
    }
}

View file

@ -0,0 +1,51 @@
package com.example.crud.db.sequence;
import com.example.crud.crypto.Randomness;
import org.eclipse.persistence.internal.databaseaccess.Accessor;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.sequencing.Sequence;
import java.util.Vector;
/**
 * PSRNSequence generates pseudo-secure-random 36-character hex string identifiers.
 */
public class PSRNSequence extends Sequence {

    public PSRNSequence(String name) {
        super(name);
    }

    /** Produces a new 36-character secure-random hex string. */
    @Override
    public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) {
        return Randomness.randomHexStringSecure(36);
    }

    /** Bulk generation is not supported; callers fall back to single-value generation. */
    @Override
    public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) {
        return null;
    }

    @Override
    public void onConnect() { }

    @Override
    public void onDisconnect() { }

    @Override
    public boolean shouldAcquireValueAfterInsert() {
        return false;
    }

    /**
     * A value should be assigned when none exists yet.
     *
     * Fix: guard against null -- the previous implementation threw a
     * NullPointerException when no value had been assigned at all.
     */
    public boolean shouldAlwaysOverrideExistingValue(String seqName, Object existingValue) {
        return existingValue == null || ((String) existingValue).isEmpty();
    }

    @Override
    public boolean shouldUseTransaction() {
        return false;
    }

    @Override
    public boolean shouldUsePreallocation() {
        return false;
    }
}

View file

@ -0,0 +1,55 @@
package com.example.crud.db.sequence;
import java.util.UUID;
import java.util.Vector;
import org.eclipse.persistence.config.SessionCustomizer;
import org.eclipse.persistence.internal.databaseaccess.Accessor;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.sequencing.Sequence;
import org.eclipse.persistence.sessions.Session;
/**
 * UUIDSequence generates random (type 4) UUID string identifiers, upper-cased.
 */
public class UUIDSequence extends Sequence {

    public UUIDSequence(String name) {
        super(name);
    }

    /** Produces a new upper-case random UUID string. */
    @Override
    public Object getGeneratedValue(Accessor accessor, AbstractSession writeSession, String seqName) {
        return UUID.randomUUID().toString().toUpperCase();
    }

    /** Bulk generation is not supported; callers fall back to single-value generation. */
    @Override
    public Vector getGeneratedVector(Accessor accessor, AbstractSession writeSession, String seqName, int size) {
        return null;
    }

    @Override
    public void onConnect() {
    }

    @Override
    public void onDisconnect() {
    }

    @Override
    public boolean shouldAcquireValueAfterInsert() {
        return false;
    }

    /**
     * A value should be assigned when none exists yet.
     *
     * Fix: guard against null -- the previous implementation threw a
     * NullPointerException when no value had been assigned at all.
     */
    public boolean shouldAlwaysOverrideExistingValue(String seqName, Object existingValue) {
        return existingValue == null || ((String) existingValue).isEmpty();
    }

    @Override
    public boolean shouldUseTransaction() {
        return false;
    }

    @Override
    public boolean shouldUsePreallocation() {
        return false;
    }
}

View file

@ -0,0 +1,34 @@
package com.example.crud.db.util;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import org.springframework.scheduling.annotation.EnableAsync;
@EnableAsync
@SpringBootApplication
@EnableJpaAuditing
class AuditingConfiguration {

    /** Supplies the auditor name recorded by JPA auditing on create/modify. */
    @Bean
    public AuditorAware<String> createAuditorProvider() {
        return new SecurityAuditor();
    }

    /** Entity listener that stamps the auditing fields during persistence events. */
    @Bean
    public AuditingEntityListener createAuditingListener() {
        return new AuditingEntityListener();
    }

    /** Resolves the identity to attribute audited changes to. */
    public static class SecurityAuditor implements AuditorAware<String> {
        @Override
        public String getCurrentAuditor() {
            // NOTE(review): security integration is stubbed out; every change is
            // attributed to this placeholder until the commented-out
            // SecurityContextHolder lookup below is wired in.
            //Authentication auth = SecurityContextHolder.getContext().getAuthentication();
            //String username = auth.getName();
            //return username;
            return "joeblow";
        }
    }
}

View file

@ -0,0 +1,57 @@
package com.example.crud.db.util;
import com.example.crud.db.sequence.FlakeSequence;
import com.example.crud.db.sequence.PSRNSequence;
import com.example.crud.db.sequence.UUIDSequence;
import com.google.common.collect.Lists;
import org.eclipse.persistence.config.SessionCustomizer;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.sessions.Session;
import org.eclipse.persistence.tools.schemaframework.IndexDefinition;
import java.sql.SQLException;
/**
 * Customizes the EclipseLink session: enables concurrent result-set processing,
 * registers the application's sequence generators, and converts default (alias-derived)
 * table names to snake_case.
 */
public class EclipseLinkSessionCustomizer implements SessionCustomizer {

    @Override
    public void customize(Session session) throws SQLException {
        // Enable concurrent processing of result sets and concurrent loading of load groups.
        ((AbstractSession) session).setIsConcurrent(true);

        // Add sequence generators.
        Lists.newArrayList(
                new UUIDSequence("uuid-seq"),
                new PSRNSequence("psrn-seq"),
                new FlakeSequence("flake-seq"))
                .forEach(session.getLogin()::addSequence);

        // Convert class names to table names by adding '_' characters when case changes.
        for (ClassDescriptor descriptor : session.getDescriptors().values()) {
            // Only change the table name for non-embeddable entities with no @Table already:
            // if the table name still equals the alias, it was defaulted from the class name.
            if (!descriptor.getTables().isEmpty() &&
                    descriptor.getAlias().equalsIgnoreCase(descriptor.getTableName())) {
                String tableName = convertCaseToUnderscores(descriptor.getTableName());
                descriptor.setTableName(tableName);
                for (IndexDefinition index : descriptor.getTables().get(0).getIndexes()) {
                    index.setTargetTable(tableName);
                }
            }
        }
    }

    /**
     * Converts CamelCase to snake_case: '.' becomes '_', then a '_' is inserted at every
     * lower-Upper-lower boundary, and the result is lower-cased.
     *
     * NOTE(review): a trailing capital (e.g. "FooB") gets no separator because the scan
     * stops one character before the end -- confirm whether that is intended.
     */
    private static String convertCaseToUnderscores(String name) {
        // StringBuilder instead of StringBuffer: no synchronization is needed here.
        StringBuilder buf = new StringBuilder(name.replace('.', '_'));
        for (int i = 1; i < buf.length() - 1; i++) {
            if (Character.isLowerCase(buf.charAt(i - 1)) &&
                    Character.isUpperCase(buf.charAt(i)) &&
                    Character.isLowerCase(buf.charAt(i + 1))) {
                buf.insert(i++, '_'); // skip past the inserted '_'
            }
        }
        return buf.toString().toLowerCase();
    }
}

View file

@ -0,0 +1,229 @@
package com.example.crud.metrics;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
/**
 * A reporter class for logging metrics values to a {@link Logger} periodically, similar to
 * {@link com.codahale.metrics.ConsoleReporter} or {@link com.codahale.metrics.CsvReporter}, but using
 * the logging framework instead. All metrics of one report cycle are coalesced into a single
 * log line. It also supports specifying a {@link Marker} instance that can be used
 * by custom appenders and filters for the bound logging toolkit to further process
 * metrics reports.
 */
public final class CoalescingReporter extends ScheduledReporter {

    private final Logger logger;
    private final Marker marker;

    /**
     * Returns a new {@link Builder} for {@link CoalescingReporter}.
     *
     * @param registry the registry to report
     * @return a {@link Builder} instance for a {@link CoalescingReporter}
     */
    public static Builder forRegistry(MetricRegistry registry) {
        return new Builder(registry);
    }

    private CoalescingReporter(MetricRegistry registry,
                               Logger logger,
                               Marker marker,
                               TimeUnit rateUnit,
                               TimeUnit durationUnit,
                               MetricFilter filter) {
        super(registry, "logger-reporter", filter, rateUnit, durationUnit);
        this.logger = logger;
        this.marker = marker;
    }

    /** Formats all registered metrics into one buffer and emits a single INFO line. */
    @Override
    public void report(SortedMap<String, Gauge> gauges,
                       SortedMap<String, Counter> counters,
                       SortedMap<String, Histogram> histograms,
                       SortedMap<String, Meter> meters,
                       SortedMap<String, Timer> timers) {
        StringBuilder data = new StringBuilder();
        for (Entry<String, Gauge> entry : gauges.entrySet()) {
            addGauge(data, entry.getKey(), entry.getValue());
        }
        for (Entry<String, Counter> entry : counters.entrySet()) {
            addCounter(data, entry.getKey(), entry.getValue());
        }
        for (Entry<String, Histogram> entry : histograms.entrySet()) {
            addHistogram(data, entry.getKey(), entry.getValue());
        }
        for (Entry<String, Meter> entry : meters.entrySet()) {
            addMeter(data, entry.getKey(), entry.getValue());
        }
        for (Entry<String, Timer> entry : timers.entrySet()) {
            addTimer(data, entry.getKey(), entry.getValue());
        }
        logger.info(marker, data.toString());
    }

    private void addTimer(StringBuilder data, String name, Timer timer) {
        final Snapshot snapshot = timer.getSnapshot();
        data.append(" type=timer.").append(name).append(":");
        data.append(" count=").append(timer.getCount());
        data.append(", min=").append(convertDuration(snapshot.getMin()));
        data.append(", max=").append(convertDuration(snapshot.getMax()));
        data.append(", mean=").append(convertDuration(snapshot.getMean()));
        data.append(", stdDev=").append(convertDuration(snapshot.getStdDev()));
        data.append(", median=").append(convertDuration(snapshot.getMedian()));
        data.append(", p75=").append(convertDuration(snapshot.get75thPercentile()));
        data.append(", p95=").append(convertDuration(snapshot.get95thPercentile()));
        data.append(", p98=").append(convertDuration(snapshot.get98thPercentile()));
        data.append(", p99=").append(convertDuration(snapshot.get99thPercentile()));
        data.append(", 999=").append(convertDuration(snapshot.get999thPercentile()));
        data.append(", mean_rate=").append(convertRate(timer.getMeanRate()));
        // Fix: m1/m5/m15 previously all logged the mean rate (copy-paste bug);
        // they must report the 1/5/15-minute moving-average rates, as addMeter does.
        data.append(", m1=").append(convertRate(timer.getOneMinuteRate()));
        data.append(", m5=").append(convertRate(timer.getFiveMinuteRate()));
        data.append(", m15=").append(convertRate(timer.getFifteenMinuteRate()));
        data.append(", rate_unit=").append(getRateUnit());
        data.append(", duration_unit=").append(getDurationUnit());
    }

    private void addMeter(StringBuilder data, String name, Meter meter) {
        data.append(" type=meter.").append(name).append(":");
        data.append(" count=").append(meter.getCount());
        data.append(", mean_rate=").append(convertRate(meter.getMeanRate()));
        data.append(", m1=").append(convertRate(meter.getOneMinuteRate()));
        data.append(", m5=").append(convertRate(meter.getFiveMinuteRate()));
        data.append(", m15=").append(convertRate(meter.getFifteenMinuteRate()));
        data.append(", rate_unit=").append(getRateUnit());
    }

    private void addHistogram(StringBuilder data, String name, Histogram histogram) {
        final Snapshot snapshot = histogram.getSnapshot();
        data.append(" type=histogram.").append(name).append(":");
        data.append(" count=").append(histogram.getCount());
        data.append(", min=").append(snapshot.getMin());
        data.append(", max=").append(snapshot.getMax());
        data.append(", mean=").append(snapshot.getMean());
        data.append(", stdDev=").append(snapshot.getStdDev());
        data.append(", median=").append(snapshot.getMedian());
        data.append(", p75=").append(snapshot.get75thPercentile());
        data.append(", p95=").append(snapshot.get95thPercentile());
        data.append(", p98=").append(snapshot.get98thPercentile());
        data.append(", p99=").append(snapshot.get99thPercentile());
        data.append(", 999=").append(snapshot.get999thPercentile());
    }

    private void addCounter(StringBuilder data, String name, Counter counter) {
        data.append(" counter.").append(name).append(": ").append(counter.getCount());
    }

    private void addGauge(StringBuilder data, String name, Gauge gauge) {
        data.append(" gauge.").append(name).append(": ").append(gauge.getValue());
    }

    @Override
    protected String getRateUnit() {
        return "events/" + super.getRateUnit();
    }

    /**
     * A builder for {@link CoalescingReporter} instances. Defaults to logging to {@code metrics}, not
     * using a marker, converting rates to events/second, converting durations to milliseconds, and
     * not filtering metrics.
     */
    public static final class Builder {
        private final MetricRegistry registry;
        private Logger logger;
        private Marker marker;
        private TimeUnit rateUnit;
        private TimeUnit durationUnit;
        private MetricFilter filter;

        private Builder(MetricRegistry registry) {
            this.registry = registry;
            this.logger = LoggerFactory.getLogger("metrics");
            this.marker = null;
            this.rateUnit = TimeUnit.SECONDS;
            this.durationUnit = TimeUnit.MILLISECONDS;
            this.filter = MetricFilter.ALL;
        }

        /**
         * Log metrics to the given logger.
         *
         * @param logger a {@link Logger}
         * @return {@code this}
         */
        public Builder outputTo(Logger logger) {
            this.logger = logger;
            return this;
        }

        /**
         * Mark all logged metrics with the given marker.
         *
         * @param marker a {@link Marker}
         * @return {@code this}
         */
        public Builder markWith(Marker marker) {
            this.marker = marker;
            return this;
        }

        /**
         * Convert rates to the given time unit.
         *
         * @param rateUnit a unit of time
         * @return {@code this}
         */
        public Builder convertRatesTo(TimeUnit rateUnit) {
            this.rateUnit = rateUnit;
            return this;
        }

        /**
         * Convert durations to the given time unit.
         *
         * @param durationUnit a unit of time
         * @return {@code this}
         */
        public Builder convertDurationsTo(TimeUnit durationUnit) {
            this.durationUnit = durationUnit;
            return this;
        }

        /**
         * Only report metrics which match the given filter.
         *
         * @param filter a {@link MetricFilter}
         * @return {@code this}
         */
        public Builder filter(MetricFilter filter) {
            this.filter = filter;
            return this;
        }

        /**
         * Builds a {@link CoalescingReporter} with the given properties.
         *
         * @return a {@link CoalescingReporter}
         */
        public CoalescingReporter build() {
            return new CoalescingReporter(registry, logger, marker, rateUnit, durationUnit, filter);
        }
    }
}

View file

@ -0,0 +1,38 @@
package com.example.crud.metrics;
import com.ryantenney.metrics.spring.reporter.AbstractReporterElementParser;
/**
 * Element parser for metrics-spring that registers the compact one-line reporter:
 * instead of one log line per metric, all metrics are coalesced into a single line.
 */
public class CoalescingReporterElementParser extends AbstractReporterElementParser {

    private static final String FILTER_REF = "filter-ref";
    private static final String FILTER_PATTERN = "filter";

    /** XML {@code type} attribute value that selects this reporter. */
    @Override
    public String getType() {
        return "compact-slf4j";
    }

    /** Factory bean that materializes the configured reporter. */
    @Override
    protected Class<?> getBeanClass() {
        return CoalescingReporterFactoryBean.class;
    }

    /**
     * Validates the reporter element: the period is mandatory; marker, logger, rate
     * and duration units are optional; 'filter' and 'filter-ref' are mutually exclusive.
     */
    @Override
    protected void validate(ValidationContext c) {
        c.require(CoalescingReporterFactoryBean.PERIOD, DURATION_STRING_REGEX,
                "Period is required and must be in the form '\\d+(ns|us|ms|s|m|h|d)'");

        c.optional(CoalescingReporterFactoryBean.MARKER);
        c.optional(CoalescingReporterFactoryBean.LOGGER);
        c.optional(CoalescingReporterFactoryBean.RATE_UNIT, TIMEUNIT_STRING_REGEX,
                "Rate unit must be one of the enum constants from java.util.concurrent.TimeUnit");
        c.optional(CoalescingReporterFactoryBean.DURATION_UNIT, TIMEUNIT_STRING_REGEX,
                "Duration unit must be one of the enum constants from java.util.concurrent.TimeUnit");

        c.optional(FILTER_PATTERN);
        c.optional(FILTER_REF);
        boolean bothFilterForms = c.has(FILTER_PATTERN) && c.has(FILTER_REF);
        if (bothFilterForms) {
            c.reject(FILTER_REF, "Reporter element must not specify both the 'filter' and 'filter-ref' attributes");
        }
        c.rejectUnmatchedProperties();
    }
}

View file

@ -0,0 +1,54 @@
package com.example.crud.metrics;
import com.ryantenney.metrics.spring.reporter.AbstractScheduledReporterFactoryBean;
import org.slf4j.LoggerFactory;
import org.slf4j.MarkerFactory;
import java.util.concurrent.TimeUnit;
/**
 * Spring factory bean that assembles a {@link CoalescingReporter} from the attributes
 * declared on the corresponding metrics-spring XML element.
 */
public class CoalescingReporterFactoryBean extends AbstractScheduledReporterFactoryBean<CoalescingReporter> {

    /** Period attribute. */
    public static final String PERIOD = "period";
    /** Duration unit. */
    public static final String DURATION_UNIT = "duration-unit";
    /** Rate unit. */
    public static final String RATE_UNIT = "rate-unit";
    /** Marker. */
    public static final String MARKER = "marker";
    /** Logger. */
    public static final String LOGGER = "logger";

    @Override
    public Class<CoalescingReporter> getObjectType() {
        return CoalescingReporter.class;
    }

    /** Builds the reporter, applying only the attributes that were configured. */
    @Override
    protected CoalescingReporter createInstance() {
        final CoalescingReporter.Builder builder = CoalescingReporter.forRegistry(getMetricRegistry());

        if (hasProperty(DURATION_UNIT)) {
            builder.convertDurationsTo(getProperty(DURATION_UNIT, TimeUnit.class));
        }
        if (hasProperty(RATE_UNIT)) {
            builder.convertRatesTo(getProperty(RATE_UNIT, TimeUnit.class));
        }
        builder.filter(getMetricFilter());
        if (hasProperty(MARKER)) {
            builder.markWith(MarkerFactory.getMarker(getProperty(MARKER)));
        }
        if (hasProperty(LOGGER)) {
            builder.outputTo(LoggerFactory.getLogger(getProperty(LOGGER)));
        }

        return builder.build();
    }

    /** Reporting period, parsed from the mandatory 'period' attribute. */
    @Override
    protected long getPeriod() {
        return convertDurationString(getProperty(PERIOD));
    }
}

View file

@ -0,0 +1,179 @@
package com.example.crud.metrics;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteReporter;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.codahale.metrics.jvm.*;
import com.codahale.metrics.riemann.Riemann;
import com.codahale.metrics.riemann.RiemannReporter;
import com.example.crud.Constants;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.env.Environment;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetSocketAddress;
import java.util.concurrent.TimeUnit;
@Configuration
@EnableMetrics(proxyTargetClass = true)
@Profile("!" + Constants.SPRING_PROFILE_FAST)
public class MetricsConfiguration extends MetricsConfigurerAdapter implements EnvironmentAware {

    private static final String ENV_METRICS = "metrics.";
    private static final String ENV_METRICS_GRAPHITE = "metrics.graphite.";
    private static final String ENV_METRICS_RIEMANN = "metrics.riemann.";
    private static final String PROP_JMX_ENABLED = "jmx.enabled";
    private static final String PROP_RIEMANN_ENABLED = "enabled";
    private static final String PROP_GRAPHITE_ENABLED = "enabled";
    private static final String PROP_GRAPHITE_PREFIX = "";
    private static final String PROP_RIEMANN_PREFIX = "";
    private static final String PROP_PORT = "port";
    private static final String PROP_HOST = "host";
    private static final String PROP_METRIC_REG_JVM_MEMORY = "jvm.memory";
    private static final String PROP_METRIC_REG_JVM_GARBAGE = "jvm.garbage";
    private static final String PROP_METRIC_REG_JVM_THREADS = "jvm.threads";
    private static final String PROP_METRIC_REG_JVM_FILES = "jvm.files";
    private static final String PROP_METRIC_REG_JVM_BUFFERS = "jvm.buffers";

    private final Logger log = LoggerFactory.getLogger(MetricsConfiguration.class);

    // Registries are created once and exposed as beans below; final for safety.
    private final MetricRegistry metricRegistry = new MetricRegistry();
    private final HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();

    private RelaxedPropertyResolver propertyResolver;

    @Override
    public void setEnvironment(Environment environment) {
        this.propertyResolver = new RelaxedPropertyResolver(environment, ENV_METRICS);
    }

    @Override
    @Bean
    public MetricRegistry getMetricRegistry() {
        return metricRegistry;
    }

    @Override
    @Bean
    public HealthCheckRegistry getHealthCheckRegistry() {
        return healthCheckRegistry;
    }

    /**
     * Registers the JVM gauge sets, optionally starts JMX reporting, and starts the
     * coalescing one-line log reporter (one report per minute).
     */
    @PostConstruct
    public void init() {
        log.debug("Registering JVM gauges");
        metricRegistry.registerAll(new OperatingSystemGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_MEMORY, new MemoryUsageGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_GARBAGE, new GarbageCollectorMetricSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_THREADS, new ThreadStatesGaugeSet());
        metricRegistry.register(PROP_METRIC_REG_JVM_FILES, new FileDescriptorRatioGauge());
        metricRegistry.register(PROP_METRIC_REG_JVM_BUFFERS, new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer()));

        if (propertyResolver.getProperty(PROP_JMX_ENABLED, Boolean.class, false)) {
            log.info("Initializing Metrics JMX reporting");
            JmxReporter jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
            jmxReporter.start();
        }

        CoalescingReporter reporter = CoalescingReporter.forRegistry(metricRegistry)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .build();
        reporter.start(1, TimeUnit.MINUTES);
    }

    /** Optional Graphite reporting, enabled via 'metrics.graphite.enabled'. */
    @Configuration
    @ConditionalOnClass(Graphite.class)
    @Profile("!" + Constants.SPRING_PROFILE_FAST)
    public static class GraphiteRegistry implements EnvironmentAware {

        private final Logger log = LoggerFactory.getLogger(GraphiteRegistry.class);

        @Inject
        private MetricRegistry metricRegistry;

        private RelaxedPropertyResolver propertyResolver;

        @Override
        public void setEnvironment(Environment environment) {
            this.propertyResolver = new RelaxedPropertyResolver(environment, ENV_METRICS_GRAPHITE);
        }

        @PostConstruct
        private void init() {
            //TODO(gburd): why isn't this picking up the application property?
            Boolean enabled = propertyResolver.getProperty(PROP_GRAPHITE_ENABLED, Boolean.class, false);
            if (enabled) {
                log.info("Initializing Metrics Graphite reporting");
                String host = propertyResolver.getRequiredProperty(PROP_HOST);
                Integer port = propertyResolver.getRequiredProperty(PROP_PORT, Integer.class);
                String prefix = propertyResolver.getProperty(PROP_GRAPHITE_PREFIX, String.class, "");
                Graphite graphite = new Graphite(new InetSocketAddress(host, port));
                GraphiteReporter reporter = GraphiteReporter.forRegistry(metricRegistry)
                        .convertRatesTo(TimeUnit.SECONDS)
                        .convertDurationsTo(TimeUnit.MILLISECONDS)
                        .prefixedWith(prefix)
                        .build(graphite);
                reporter.start(5, TimeUnit.MINUTES);
            }
        }
    }

    /** Optional Riemann reporting, enabled via 'metrics.riemann.enabled'. */
    @Configuration
    @ConditionalOnClass(Riemann.class)
    @Profile("!" + Constants.SPRING_PROFILE_FAST)
    public static class RiemannRegistry implements EnvironmentAware {

        private final Logger log = LoggerFactory.getLogger(RiemannRegistry.class);

        @Inject
        private MetricRegistry metricRegistry;

        private RelaxedPropertyResolver propertyResolver;

        @Override
        public void setEnvironment(Environment environment) {
            this.propertyResolver = new RelaxedPropertyResolver(environment, ENV_METRICS_RIEMANN);
        }

        @PostConstruct
        private void init() {
            //TODO(gburd): why isn't this picking up the application property?
            Boolean enabled = propertyResolver.getProperty(PROP_RIEMANN_ENABLED, Boolean.class, false);
            if (enabled) {
                log.info("Initializing Metrics Riemann reporting");
                String host = propertyResolver.getRequiredProperty(PROP_HOST);
                Integer port = propertyResolver.getRequiredProperty(PROP_PORT, Integer.class);
                String prefix = propertyResolver.getProperty(PROP_RIEMANN_PREFIX, String.class, "");
                try {
                    Riemann riemann = new Riemann(host, port);
                    // NOTE(review): rates here are per millisecond while the Graphite
                    // reporter uses per second -- confirm this asymmetry is intended.
                    RiemannReporter reporter = RiemannReporter.forRegistry(metricRegistry)
                            .convertRatesTo(TimeUnit.MILLISECONDS)
                            .convertDurationsTo(TimeUnit.MILLISECONDS)
                            .prefixedWith(prefix)
                            .build(riemann);
                    reporter.start(1, TimeUnit.SECONDS);
                } catch (IOException e) {
                    // Fix: log the exception itself so the stack trace is preserved
                    // (previously only e.toString() was logged).
                    log.error("Failed to initialize Metrics Riemann reporting", e);
                }
            }
        }
    }
}

View file

@ -0,0 +1,125 @@
package com.example.crud.metrics;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricSet;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
 * A set of gauges for operating system settings, resolved reflectively because the
 * interesting accessors live on the JVM-specific OperatingSystemMXBean subclass
 * (e.g. com.sun.management) rather than on the standard interface.
 */
public class OperatingSystemGaugeSet implements MetricSet {

    private final OperatingSystemMXBean mxBean;
    private final Optional<Method> committedVirtualMemorySize;
    private final Optional<Method> totalSwapSpaceSize;
    private final Optional<Method> freeSwapSpaceSize;
    private final Optional<Method> processCpuTime;
    private final Optional<Method> freePhysicalMemorySize;
    private final Optional<Method> totalPhysicalMemorySize;
    private final Optional<Method> openFileDescriptorCount;
    private final Optional<Method> maxFileDescriptorCount;
    private final Optional<Method> systemCpuLoad;
    private final Optional<Method> processCpuLoad;

    /**
     * Creates new gauges using the platform OS bean.
     */
    public OperatingSystemGaugeSet() {
        this(ManagementFactory.getOperatingSystemMXBean());
    }

    /**
     * Creates new gauges using the given OS bean.
     *
     * @param mxBean an {@link OperatingSystemMXBean}
     */
    public OperatingSystemGaugeSet(OperatingSystemMXBean mxBean) {
        this.mxBean = mxBean;
        committedVirtualMemorySize = getMethod("getCommittedVirtualMemorySize");
        totalSwapSpaceSize = getMethod("getTotalSwapSpaceSize");
        freeSwapSpaceSize = getMethod("getFreeSwapSpaceSize");
        processCpuTime = getMethod("getProcessCpuTime");
        freePhysicalMemorySize = getMethod("getFreePhysicalMemorySize");
        totalPhysicalMemorySize = getMethod("getTotalPhysicalMemorySize");
        openFileDescriptorCount = getMethod("getOpenFileDescriptorCount");
        maxFileDescriptorCount = getMethod("getMaxFileDescriptorCount");
        systemCpuLoad = getMethod("getSystemCpuLoad");
        processCpuLoad = getMethod("getProcessCpuLoad");
    }

    /** Gauges report 0 / NaN when the underlying accessor is unavailable. */
    @Override
    public Map<String, Metric> getMetrics() {
        final Map<String, Metric> gauges = new HashMap<>();
        gauges.put("committedVirtualMemorySize", (Gauge<Long>) () -> invokeLong(committedVirtualMemorySize));
        gauges.put("totalSwapSpaceSize", (Gauge<Long>) () -> invokeLong(totalSwapSpaceSize));
        gauges.put("freeSwapSpaceSize", (Gauge<Long>) () -> invokeLong(freeSwapSpaceSize));
        gauges.put("processCpuTime", (Gauge<Long>) () -> invokeLong(processCpuTime));
        gauges.put("freePhysicalMemorySize", (Gauge<Long>) () -> invokeLong(freePhysicalMemorySize));
        gauges.put("totalPhysicalMemorySize", (Gauge<Long>) () -> invokeLong(totalPhysicalMemorySize));
        gauges.put("fd.usage", (Gauge<Double>) () -> invokeRatio(openFileDescriptorCount, maxFileDescriptorCount));
        gauges.put("systemCpuLoad", (Gauge<Double>) () -> invokeDouble(systemCpuLoad));
        gauges.put("processCpuLoad", (Gauge<Double>) () -> invokeDouble(processCpuLoad));
        return gauges;
    }

    /**
     * Looks up a declared accessor on the runtime bean class, or empty when absent
     * or inaccessible.
     *
     * Fix: also catch SecurityException -- both getDeclaredMethod and setAccessible
     * may throw it, and previously that would have escaped the constructor.
     * NOTE(review): under the Java 9+ module system setAccessible may instead throw
     * InaccessibleObjectException -- confirm target JVM if upgrading.
     */
    private Optional<Method> getMethod(String name) {
        try {
            final Method method = mxBean.getClass().getDeclaredMethod(name);
            method.setAccessible(true);
            return Optional.of(method);
        } catch (NoSuchMethodException | SecurityException e) {
            return Optional.empty();
        }
    }

    /** Invokes a long-returning accessor, defaulting to 0 when absent or failing. */
    private long invokeLong(Optional<Method> method) {
        if (method.isPresent()) {
            try {
                return (long) method.get().invoke(mxBean);
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return 0L;
            }
        }
        return 0L;
    }

    /** Invokes a double-returning accessor, defaulting to 0.0 when absent or failing. */
    private double invokeDouble(Optional<Method> method) {
        if (method.isPresent()) {
            try {
                return (double) method.get().invoke(mxBean);
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return 0.0;
            }
        }
        return 0.0;
    }

    /** Ratio of two long accessors; NaN when either is absent, fails, or divides by 0. */
    private double invokeRatio(Optional<Method> numeratorMethod, Optional<Method> denominatorMethod) {
        if (numeratorMethod.isPresent() && denominatorMethod.isPresent()) {
            try {
                long numerator = (long) numeratorMethod.get().invoke(mxBean);
                long denominator = (long) denominatorMethod.get().invoke(mxBean);
                if (0 == denominator) {
                    return Double.NaN;
                }
                return 1.0 * numerator / denominator;
            } catch (IllegalAccessException | InvocationTargetException ite) {
                return Double.NaN;
            }
        }
        return Double.NaN;
    }
}

View file

@ -0,0 +1,22 @@
package com.example.crud.metrics;
import java.util.concurrent.TimeUnit;
import org.springframework.context.annotation.Configuration;
import com.codahale.metrics.ConsoleReporter;
import com.codahale.metrics.MetricRegistry;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
@Configuration
@EnableMetrics
public class SpringConfiguringClass extends MetricsConfigurerAdapter {

    /**
     * Wires a console reporter that dumps every metric once per minute.
     * Going through registerReporter lets the MetricsConfigurerAdapter shut the
     * reporter down when the Spring context is closed.
     */
    @Override
    public void configureReporters(MetricRegistry metricRegistry) {
        final ConsoleReporter consoleReporter = ConsoleReporter.forRegistry(metricRegistry).build();
        registerReporter(consoleReporter).start(1, TimeUnit.MINUTES);
    }
}

View file

@ -0,0 +1,45 @@
package com.example.crud.services;
import com.example.crud.db.models.Employee;
import com.example.crud.db.dao.EmployeeRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
@EnableJpaRepositories("com.example.crud.db.dao")
public class EmployeeService {

    private static final Logger log = LoggerFactory.getLogger(EmployeeService.class);

    @Autowired private EmployeeRepository employees;

    /** Returns every employee. */
    public List<Employee> findAll() {
        return employees.findAll();
    }

    /** Saves the employee and flushes immediately so generated state is populated. */
    public Employee saveEmployee(Employee employee) {
        return employees.saveAndFlush(employee);
    }

    /** Returns a (possibly lazy) reference to the employee with the given id. */
    public Employee getOne(Long id) {
        return employees.getOne(id);
    }

    /** Finds all employees matching the given last name exactly. */
    public List<Employee> findByLastName(String name) {
        return employees.findByLastName(name);
    }

    /** Deletes the employee with the given id. */
    public void delete(Long id) {
        employees.delete(id);
    }

    /*
    public List<Employee> fetchByLastNameLength(Long length) {
        return employees.fetchByLastNameLength(length);
    }
    */
}

View file

@ -0,0 +1,28 @@
package com.example.crud.util;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.AnnotationUtils;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
public class ReflectionUtils {

    /**
     * Resolves an annotation for an intercepted method: first on the method from the
     * join-point signature (which may be an interface method), then falls back to the
     * matching method on the concrete target class.
     *
     * NOTE(review): the fallback looks the method up by the runtime classes of the
     * actual arguments, so a null argument throws NullPointerException and an argument
     * whose runtime type is a subclass of the declared parameter type may cause
     * NoSuchMethodException -- confirm callers never hit either case.
     *
     * @param pjp             the AspectJ join point being advised
     * @param annotationClass the annotation type to look for
     * @return the annotation instance, or null if not present on either method
     * @throws NoSuchMethodException if no matching method exists on the target class
     */
    public static <T extends Annotation> T getAnnotation(ProceedingJoinPoint pjp, Class<T> annotationClass) throws NoSuchMethodException {
        MethodSignature signature = (MethodSignature) pjp.getSignature();
        T found = AnnotationUtils.findAnnotation(signature.getMethod(), annotationClass);
        if (found != null) {
            return found;
        }

        // Rebuild the parameter list from the runtime types of the actual arguments
        // and retry against the implementation class.
        Object[] args = pjp.getArgs();
        Class[] argTypes = new Class[args.length];
        for (int i = 0; i < args.length; i++) {
            argTypes[i] = args[i].getClass();
        }
        Method targetMethod = pjp.getTarget().getClass().getMethod(pjp.getSignature().getName(), argTypes);
        return AnnotationUtils.findAnnotation(targetMethod, annotationClass);
    }
}

View file

@ -0,0 +1,104 @@
package com.example.crud.util;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
public class SerialVersionUID {
/**
* Compute a UID for the serialization version of a class.
*
* The Java object serialization standard defines an algorithm for computing the default serialVersionUID of a
* class: http://docs.oracle.com/javase/6/docs/platform/serialization/spec/class.html#4100. This method computes a
* serialVersionUID value models classes. For example, the com.example.models.User class should specify the following:
*
* <code><pre>
* private static final long serialVersionUID = SerialVersionUID.computeUID(User.class);
* </pre></code>
*
* Java provides a way to access and possibly compute a default value for serialVersionUID using
* java.io.ObjectStreamClass. If a class has no serialVersionID, the
* {@link java.io.ObjectStreamClass#getSerialVersionUID()} computes a default value according to the algorithm
* defined by the object serialization standard. However, we do not use this method because there is no clean way to
* access the private method computeDefaultSUID that performs the computation.
*
* Therefore, this method is based on, but different from the implementation of
* java.io.ObjectStreamClass#computeDefaultSIUD(Class<?>). This implementation does not factor various elements of
* the default computation into the resulting hash because the goal is to characterize versions of the class which
* are compatible for serialization and deserialization of their fields even if method signatures change. We don't
* want the addition of a method or a constructor to change the computed value. Therefore, this method uses only
* some of the internal logic of the default algorithm: it implements steps 1, 4, 8 and 9 of the standard algorithm
* and omits steps 2, 3, 5, 6 and 8.
*
* Like the standard algorithm, this implementation writes various elements of the class definition to a
* DataOutputStream and then computes a SHA-1 digest of the stream and returns a hash based on the digest. Unlike
* the standard algorithm, this implementation does not include interface, method and constructor signatures.
*
* @param clazz
* The class
* @return A version UID.
*/
public static long compute(final Class<?> clazz) {
    try {
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        DataOutputStream dataOut = new DataOutputStream(bytesOut);
        // Step 1: the fully-qualified class name.
        dataOut.writeUTF(clazz.getName());
        // Step 4: declared fields, sorted by name for a stable, deterministic ordering.
        List<Field> sorted = Arrays.asList(clazz.getDeclaredFields());
        sorted.sort(Comparator.comparing(Field::getName));
        for (final Field field : sorted) {
            // Keep only the modifier bits that matter for serialization compatibility.
            int mods = field.getModifiers() &
                    (Modifier.PUBLIC | Modifier.PRIVATE | Modifier.PROTECTED |
                            Modifier.STATIC | Modifier.FINAL | Modifier.VOLATILE |
                            Modifier.TRANSIENT);
            // Include the field unless it is private static or private transient:
            // such fields never participate in default serialization of instance state.
            if (((mods & Modifier.PRIVATE) == 0) ||
                    ((mods & (Modifier.STATIC | Modifier.TRANSIENT)) == 0)) {
                dataOut.writeUTF(field.getName());
                dataOut.writeInt(mods);
                dataOut.writeUTF(field.getType().getName());
            }
        }
        dataOut.flush();
        // Step 8: SHA-1 digest of the serialized class description.
        // Name the algorithm explicitly instead of relying on the "SHA" alias.
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        byte[] hashBytes = md.digest(bytesOut.toByteArray());
        // Step 9: fold the first eight digest bytes (little-endian) into a long.
        long hash = 0L;
        for (int i = Math.min(hashBytes.length, 8) - 1; i >= 0; i--) {
            hash = (hash << 8) | (hashBytes[i] & 0xFF);
        }
        return hash;
    }
    catch (IOException ex) {
        // Cannot happen with an in-memory stream; surface as a JVM-level fault.
        throw new InternalError(ex);
    }
    catch (NoSuchAlgorithmException ex) {
        // Preserve the full cause chain; the original threw only ex.getMessage(),
        // discarding the underlying exception.
        throw new SecurityException(ex);
    }
}
}

View file

@ -0,0 +1,79 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!-- https://jcp.org/aboutJava/communityprocess/final/jsr338/index.html -->
<persistence xmlns="http://xmlns.jcp.org/xml/ns/persistence"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd"
version="2.1">
<persistence-unit name="default" transaction-type="RESOURCE_LOCAL">
<provider>org.eclipse.persistence.jpa.PersistenceProvider</provider>
<exclude-unlisted-classes>true</exclude-unlisted-classes>
<class>com.example.crud.db.models.Address</class>
<class>com.example.crud.db.models.Employee</class>
<class>com.example.crud.db.models.EmploymentPeriod</class>
<class>com.example.crud.db.models.Gender</class>
<class>com.example.crud.db.models.GenderConverter</class>
<class>com.example.crud.db.models.PhoneNumber</class>
<properties>
<!-- EclipseLink should create the database schema automatically -->
<property name="javax.persistence.jdbc.driver" value="org.postgresql.Driver"/>
<property name="javax.persistence.jdbc.url" value="jdbc:postgresql://127.0.0.1:26257/crud?sslmode=disable"/>
<property name="javax.persistence.jdbc.user" value="root"/>
<property name="javax.persistence.jdbc.password" value=""/>
<property name="javax.persistence.schema-generation.database.action" value="create-or-extend-tables"/>
<property name="eclipselink.ddl-generation" value="create-or-extend-tables"/>
<property name="eclipselink.ddl-generation.output-mode" value="database"/>
<property name="eclipselink.target-database" value="org.eclipse.persistence.platform.database.PostgreSQLPlatform"/>
<property name="eclipselink.session.customizer" value="com.example.crud.db.util.EclipseLinkSessionCustomizer"/>
<!-- Enable weaving "All the things!"(TM) -->
<property name="eclipselink.weaving" value="static"/>
<property name="eclipselink.weaving.eager" value="true"/>
<property name="eclipselink.weaving.fetchgroups" value="true"/>
<property name="eclipselink.weaving.internal" value="true"/>
<property name="eclipselink.weaving.lazy" value="true"/>
<!-- Optimization - statement caching -->
<property name="eclipselink.jdbc.cache-statements" value="true"/>
<!-- Optimization - batch writing -->
<property name="eclipselink.jdbc.batch-writing" value="JDBC"/>
<property name="eclipselink.jdbc.batch-writing.size" value="1000"/>
<!-- Optimization - disable caching for batch insert (caching only improves reads, so only adds overhead for inserts) -->
<property name="eclipselink.cache.shared.default" value="false"/>
<!-- Except for XXX which is shared by orders
<property name="eclipselink.cache.shared.Customer" value="true"/> TODO(gburd): ? -->
<!-- Optimization - turn logging off -->
<property name="eclipselink.logging.logger" value="JavaLogger"/>
<property name="eclipselink.logging.parameters" value="true"/> <!-- false -->
<property name="eclipselink.logging.level" value="FINE" /> <!-- OFF -->
<property name="eclipselink.logging.level.sql" value="FINE"/>
<!-- Optimization - close EntityManager on commit, to avoid cost of resume -->
<property name="eclipselink.persistence-context.close-on-commit" value="true"/>
<!-- Optimization - avoid auto flush cost on query execution -->
<property name="eclipselink.persistence-context.flush-mode" value="commit"/>
<!-- Optimization - avoid cost of persist on commit -->
<property name="eclipselink.persistence-context.persist-on-commit" value="false"/>
<!-- Optimization - -->
<property name="eclipselink.jdbc.bind-parameters" value="true"/>
<!-- Optimization - create indexes for all foreign key fields -->
<property name="eclipselink.ddl-generation.index-foreign-keys" value="true"/>
<!-- Optimization - enable query caching -->
<property name="eclipselink.cache.query-results" value="true"/>
<!-- NOTE: this property ENABLES the Performance Profiler; remove it (or the whole line) in production -->
<property name="eclipselink.profiler" value="org.eclipse.persistence.tools.profiler.PerformanceProfiler"/>
<!-- Level 2 Cache: database change notifications to synchronize L2 cache state -->
<!--
The hope is to plumb EclipseLink with a L2 cache similar to Hibernate's use of Redisson
and then to have a multi-region cluster of Redis nodes managed by Netflix's Dynomite.
TODO(gburd): Either these change notifications sent from the database...
(a) are idempotent and cheap to perform many times or
(b) sent to a specific owner for a key (using the Dynomite Redis client to route)
(c) are first queued then applied once by workers
<property name="eclipselink.cache.database-event-listener" value="DCN"/> -->
</properties>
</persistence-unit>
</persistence>

View file

@ -0,0 +1,28 @@
management:
port: 8484
security:
enabled: false
ssl:
enabled: false
key-store: classpath:management.jks
key-password: super*53cr37!pa33w0rd,
server:
port: ${port:8443}
ssl:
enabled: false
key-store: classpath:main.jks
key-password: super*53cr37!pa33w0rd,
undertow:
accesslog:
enabled: true
pattern: '%t %a "%r" %s (%D ms)'
spring:
aop:
proxy-target-class: true
datasource:
driver-class-name: org.postgresql.Driver
password: ''
url: jdbc:postgresql://127.0.0.1:26257/crud?sslmode=disable
username: root
jpa:
show-sql: true

View file

@ -0,0 +1,31 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:metrics="http://www.ryantenney.com/schema/metrics" xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.ryantenney.com/schema/metrics
http://www.ryantenney.com/schema/metrics/metrics.xsd http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd">
<!-- Creates a MetricRegistry bean -->
<metrics:metric-registry id="metricRegistry" />
<!-- Creates a HealthCheckRegistry bean (Optional) -->
<metrics:health-check-registry id="health" />
<!-- Registers BeanPostProcessors with Spring which proxy beans and capture metrics -->
<!-- Include this once per context (once in the parent context and in any subcontexts) -->
<metrics:annotation-driven metric-registry="metricRegistry" />
<!-- Example reporter definition. Supported reporters include jmx, slf4j, graphite, and others. -->
<!-- Reporters should be defined only once, preferably in the parent context -->
<metrics:reporter type="console" metric-registry="metricRegistry" period="1m" />
<!-- Register metric beans (Optional) -->
<!-- The metrics in this example require metrics-jvm -->
<metrics:register metric-registry="metricRegistry">
<bean metrics:name="jvm.gc" class="com.codahale.metrics.jvm.GarbageCollectorMetricSet" />
<bean metrics:name="jvm.memory" class="com.codahale.metrics.jvm.MemoryUsageGaugeSet" />
<bean metrics:name="jvm.thread-states" class="com.codahale.metrics.jvm.ThreadStatesGaugeSet" />
<bean metrics:name="jvm.fd.usage" class="com.codahale.metrics.jvm.FileDescriptorRatioGauge" />
</metrics:register>
</beans>

View file

@ -0,0 +1,118 @@
package com.example.crud.test;
import com.example.crud.db.annotations.Retry;
import com.example.crud.db.models.Employee;
import com.example.crud.db.models.Gender;
import org.junit.Test;
import javax.persistence.*;
import org.eclipse.persistence.sessions.Session;
import java.util.List;
public class BasicTest {
    EntityManagerFactory emf;
    EntityManager em;
    // Connection connection = em.unwrap(java.sql.Connection.class);

    /**
     * End-to-end smoke test against the "default" persistence unit: bulk-hire
     * sample employees, then exercise the query, modify and delete paths.
     */
    @Test
    public void testMain() throws Exception {
        //emf = PersistenceTesting.createEMF(true);
        emf = Persistence.createEntityManagerFactory("default");
        em = emf.createEntityManager();
        // Ask EclipseLink to retry failed queries a few times (transient DB errors).
        Session session = em.unwrap(Session.class);
        session.getLogin().setQueryRetryAttemptCount(3);
        try {
            hire(100);
            // Add employee with 555 area code to satisfy a test query
            em.getTransaction().begin();
            Employee e = new Employee();
            e.setFirstName("John");
            e.setLastName("Doe");
            e.setGender(Gender.Male);
            e.setSocialSecurityNumber("111-22-3333");
            e.addPhoneNumber("HOME", "555-555-2222");
            em.persist(e);
            Long id = e.getId();
            em.getTransaction().commit();
            em.clear();
            queryAllEmployees(em);
            em.clear();
            queryEmployeeLikeAreaCode55(em);
            em.clear();
            modifyEmployee(em, id);
            em.clear();
            deleteEmployee(em, id);
            em.clear();
        } finally {
            // Close the EntityManager even when the test body throws; originally
            // it was closed only on the success path, leaking it on failure.
            if (em.isOpen()) {
                em.close();
            }
            emf.close();
        }
    }

    /** Populates {@code n} random employees in a single transaction. */
    @Retry(times = 3, on = org.springframework.dao.OptimisticLockingFailureException.class)
    public int hire(int n) {
        em.getTransaction().begin();
        new SamplePopulation().createNewEmployees(em, n);
        em.getTransaction().commit();
        em.clear();
        return n;
    }

    /** Lists every employee and prints the result count. */
    public void queryAllEmployees(EntityManager em) {
        List<Employee> results = em.createQuery("SELECT e FROM Employee e", Employee.class).getResultList();
        System.out.println("Query All Results: " + results.size());
        results.forEach(e -> System.out.println("\t>" + e));
    }

    /** Finds employees whose phone number starts with "55". */
    public void queryEmployeeLikeAreaCode55(EntityManager em) {
        System.out.println("\n\n --- Query Employee.phoneNumbers.areaCode LIKE '55%' ---");
        TypedQuery<Employee> query = em.createQuery("SELECT e FROM Employee e JOIN e.phoneNumbers phones WHERE phones.number LIKE '55%'", Employee.class);
        List<Employee> emps = query.getResultList();
        emps.forEach(e -> System.out.println("> " + e));
    }

    /** Updates an employee's salary, then re-reads it within the same transaction. */
    public void modifyEmployee(EntityManager em, Long id) {
        System.out.println("\n\n --- Modify Employee ---");
        em.getTransaction().begin();
        Employee emp = em.find(Employee.class, id);
        emp.setSalary(1);
        TypedQuery<Employee> query = em.createQuery("SELECT e FROM Employee e WHERE e.id = :ID AND e.firstName = :FNAME", Employee.class);
        query.setParameter("ID", id);
        query.setParameter("FNAME", emp.getFirstName());
        emp = query.getSingleResult();
        em.getTransaction().commit();
    }

    /** Removes the employee with the given id. */
    public void deleteEmployee(EntityManager em, Long id) {
        em.getTransaction().begin();
        em.remove(em.find(Employee.class, id));
        em.flush();
        //em.getTransaction().rollback();
        em.getTransaction().commit();
    }
}

View file

@ -0,0 +1,38 @@
package com.example.crud.test;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class ConfigTest {

    // Factory shared by the whole test class; created/destroyed by the
    // @BeforeClass / @AfterClass hooks below.
    private static EntityManagerFactory emf;

    public static EntityManagerFactory getEmf() {
        return emf;
    }

    @BeforeClass
    public static void createEMF() {
        emf = PersistenceTesting.createEMF(true);
    }

    @AfterClass
    public static void closeEMF() {
        if (emf != null && emf.isOpen()) {
            emf.close();
        }
        emf = null;
    }

    /** Bootstrap check: an EntityManager can be opened and closed cleanly. */
    @Test
    public void bootstrap() {
        EntityManager manager = getEmf().createEntityManager();
        manager.close();
    }
}

View file

@ -0,0 +1,54 @@
package com.example.crud.test;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.eclipse.persistence.config.PersistenceUnitProperties;
/**
* Persistence testing helper which creates an EMF providing testing overrides
* to use direct JDBC instead of a data source
*/
public class PersistenceTesting {

    /**
     * Creates an EntityManagerFactory for the "default" persistence unit,
     * overriding its configuration for test runs: direct JDBC instead of a
     * container data source, weaving off, and (optionally) a fresh schema.
     *
     * @param replaceTables when true, drop and re-create all tables in the database
     * @return a configured EntityManagerFactory; callers are responsible for closing it
     */
    public static EntityManagerFactory createEMF(boolean replaceTables) {
        Map<String, Object> props = new HashMap<>();
        // Ensure the persistence.xml provided data sources are ignored during test runs
        props.put(PersistenceUnitProperties.NON_JTA_DATASOURCE, "");
        props.put(PersistenceUnitProperties.JTA_DATASOURCE, "");
        props.put(PersistenceUnitProperties.TRANSACTION_TYPE, "RESOURCE_LOCAL");
        // JDBC connection defaults; system properties of the same name override them.
        setProperty(props, PersistenceUnitProperties.JDBC_DRIVER, "org.postgresql.Driver");
        setProperty(props, PersistenceUnitProperties.JDBC_URL, "jdbc:postgresql://127.0.0.1:26257/crud");
        setProperty(props, PersistenceUnitProperties.JDBC_USER, "root");
        setProperty(props, PersistenceUnitProperties.JDBC_PASSWORD, "");
        // Ensure weaving is not used during testing
        props.put(PersistenceUnitProperties.WEAVING, "false");
        if (replaceTables) {
            props.put(PersistenceUnitProperties.DDL_GENERATION, PersistenceUnitProperties.DROP_AND_CREATE);
            props.put(PersistenceUnitProperties.DDL_GENERATION_MODE, PersistenceUnitProperties.DDL_DATABASE_GENERATION);
        }
        return Persistence.createEntityManagerFactory("default", props);
    }

    /**
     * Adds the system property value if set, otherwise the supplied default.
     * Uses the atomic single-lookup form of System.getProperty instead of the
     * original containsKey/get pair.
     */
    private static void setProperty(Map<String, Object> props, String key, String defaultValue) {
        props.put(key, System.getProperty(key, defaultValue));
    }
}

View file

@ -0,0 +1,52 @@
package com.example.crud.test;
import com.example.crud.db.models.Address;
import com.example.crud.db.models.Employee;
import com.example.crud.db.models.Gender;
import com.github.javafaker.Faker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.util.Random;
import static java.lang.String.format;
/**
* Examples illustrating the use of JPA with the employee domain
* com.example.crud.models.
*
* @see BasicTest
*/
public class SamplePopulation {
    private static final Logger log = LoggerFactory.getLogger(SamplePopulation.class);

    Faker fake = new Faker(); //TODO(gburd): https://github.com/joselufo/RandomUserApi https://randomuser.me/

    /**
     * Create the specified number of random sample employees.
     *
     * @param em       the EntityManager used to persist each employee
     * @param quantity how many employees to create
     */
    public void createNewEmployees(EntityManager em, int quantity) {
        for (int index = 0; index < quantity; index++) {
            em.persist(createRandomEmployee());
        }
    }

    /** Builds one employee populated with fake but plausibly-formatted data. */
    public Employee createRandomEmployee() {
        Random r = new Random();
        Employee emp = new Employee();
        // Zero-pad each SSN group; the original "%d-%d-%d" could emit malformed
        // values such as "7-3-42" instead of "007-03-0042".
        emp.setSocialSecurityNumber(format("%03d-%02d-%04d", r.nextInt(1000), r.nextInt(100), r.nextInt(10000)));
        emp.setGender(Gender.values()[r.nextInt(2)]);
        emp.setFirstName(fake.name().firstName());
        emp.setLastName(fake.name().lastName());
        // Faker already returns String, so the redundant .toString() calls are gone.
        emp.addPhoneNumber("HOME", fake.phoneNumber().phoneNumber());
        emp.addPhoneNumber("WORK", fake.phoneNumber().phoneNumber());
        emp.addPhoneNumber("MOBILE", fake.phoneNumber().cellPhone());
        emp.setAddress(new Address(fake.address().city(), fake.address().country(), "", fake.address().zipCode(), fake.address().streetAddress()));
        return emp;
    }
}

View file

@ -0,0 +1,54 @@
package com.example.crud.test.rest;
import java.util.Map;
import com.example.crud.Application;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.embedded.LocalServerPort;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.BDDAssertions.then;
/**
* Basic integration tests for service example application.
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@TestPropertySource(properties = { "management.port=0" })
public class CrudConfigurationTest {

    @LocalServerPort
    private int port;

    @Value("${local.management.port}")
    private int mgt;

    @Autowired
    private TestRestTemplate testRestTemplate;

    /** Issues a GET against {@code url} and asserts an HTTP 200 response. */
    private void assertOkResponse(String url) {
        @SuppressWarnings("rawtypes")
        ResponseEntity<Map> entity = this.testRestTemplate.getForEntity(url, Map.class);
        then(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
    }

    @Test
    public void shouldReturn200WhenSendingRequestToController() throws Exception {
        assertOkResponse("http://localhost:" + this.port + "/hello-world");
    }

    @Test
    public void shouldReturn200WhenSendingRequestToManagementEndpoint() throws Exception {
        assertOkResponse("http://localhost:" + this.mgt + "/info");
    }
}