Phoenix & Java: Connecting Secure

In this tutorial I will show you how to connect to a secure Phoenix installation using Java. It’s rather straightforward.

POM.xml

<dependency>
	<groupId>org.apache.phoenix</groupId>
	<artifactId>phoenix-core</artifactId>
	<version>5.0.0-HBase-2.0</version>
</dependency>

Imports:

import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.ResultSet;
import java.sql.Statement;

Initiate Kerberos Authentication

System.setProperty("java.security.krb5.conf", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\krb5.conf");
System.setProperty("java.security.krb5.realm", "REALM.CA");
System.setProperty("java.security.krb5.kdc", "REALM.CA");
System.setProperty("sun.security.krb5.debug", "true");
System.setProperty("javax.net.debug", "all");

Connect:

Now we create the connection. The JDBC URL takes the form jdbc:phoenix:&lt;zookeeper quorum&gt;:&lt;port&gt;:&lt;znode parent&gt;:&lt;principal&gt;:&lt;keytab file&gt;, so the example below authenticates as the hbase service principal using its keytab.

Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
String url = "jdbc:phoenix:hadoop:2181:/hbase-secure:hbase/hadoop@REALM.CA:\\data\\hbase.service.keytab";
Connection connection = DriverManager.getConnection(url);

System.out.println("Connected");

Statement statement = connection.createStatement();

//Drop table
String deleteTableSql = "DROP TABLE IF EXISTS employee";		 
System.out.println("Deleting Table: " + deleteTableSql);
statement.executeUpdate(deleteTableSql);
System.out.println("Created Table");
 
//Create a table
String createTableSql = "CREATE TABLE employee ( eid bigint primary key, name varchar)";		 
System.out.println("Creating Table: " + createTableSql);
statement.executeUpdate(createTableSql);
System.out.println("Created Table");

//Insert Data
String insertTableSql = "UPSERT INTO employee VALUES(1, 'Oliver')";
System.out.println("Inserting Data: " + insertTableSql);
statement.executeUpdate(insertTableSql);
System.out.println("Inserted Data");

connection.commit();

//Select Data
String selectTablesSql = "select * from employee";
System.out.println("Show records: " + selectTablesSql);
ResultSet res = statement.executeQuery(selectTablesSql);
 
while (res.next()) {
	System.out.println(String.format("id: %s name: %s", res.getInt("eid"), res.getString("name")));
}
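
When you are finished, close the result set, statement and connection as usual:

res.close();
statement.close();
connection.close();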

HBASE & Java: Connecting Secure

In this tutorial I will show you how to connect to a secure HBase installation using Java. It’s rather straightforward.

Import SSL Cert to Java:

Follow the “Installing unlimited strength encryption Java libraries” tutorial.

If you are on Windows, do the following:

#Import it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -import -file hadoop.csr -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts" -alias "hadoop"

#Check it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -list -v -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

#If you want to delete it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -delete -alias hadoop -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

POM.xml

<dependency>
	<groupId>org.apache.hbase</groupId>
	<artifactId>hbase-client</artifactId>
	<version>2.1.0</version>
</dependency>
<dependency>
	<groupId>org.apache.hbase</groupId>
	<artifactId>hbase</artifactId>
	<version>2.1.0</version>
	<type>pom</type>
</dependency>

Imports:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.security.UserGroupInformation;

Initiate Kerberos Authentication

System.setProperty("java.security.auth.login.config", "C:\\data\\kafkaconnect\\kafka\\src\\main\\resources\\client_jaas.conf");
System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
System.setProperty("java.security.krb5.conf", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\krb5.conf");
System.setProperty("java.security.krb5.realm", "REALM.CA");
System.setProperty("java.security.krb5.kdc", "REALM.CA");
System.setProperty("sun.security.krb5.debug", "false");
System.setProperty("javax.net.debug", "false");
System.setProperty("javax.net.ssl.keyStorePassword", "changeit");
System.setProperty("javax.net.ssl.keyStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStorePassword", "changeit");
System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

Config:

We will use a basic configuration here. On a production cluster you should harden these settings appropriately.

// Setup the configuration object.
final Configuration config = HBaseConfiguration.create();
config.set("hbase.zookeeper.quorum", "hadoop");
config.set("hbase.zookeeper.property.clientPort", "2181");
config.set("hadoop.security.authentication", "kerberos");
config.set("hbase.security.authentication", "kerberos");
config.set("hbase.cluster.distributed", "true");
config.set("hbase.rpc.protection", "integrity");
config.set("zookeeper.znode.parent", "/hbase-secure");
config.set("hbase.master.kerberos.principal", "hbase/hadoop@REALM.CA");
config.set("hbase.regionserver.kerberos.principal", "hbase/hadoop@REALM.CA");

Connect:

Now we create the connection.

UserGroupInformation.setConfiguration(config);
UserGroupInformation.setLoginUser(UserGroupInformation.loginUserFromKeytabAndReturnUGI("hbase/hadoop@REALM.CA", "c:\\data\\hbase.service.keytab"));

System.out.println(UserGroupInformation.getLoginUser());
System.out.println(UserGroupInformation.getCurrentUser());

Connection conn = ConnectionFactory.createConnection(config);

//Later when we are done we will want to close the connection.
conn.close();

Hbase Admin:

If you need to administer the HBase cluster, you can retrieve an Admin implementation from the connection.

Admin admin = conn.getAdmin();
//Later when we are done we will want to close the connection.
admin.close();
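
For example, a minimal sketch that lists every table the cluster knows about (the extra import is org.apache.hadoop.hbase.TableName):

Admin admin = conn.getAdmin();
try {
	//Print every table name the cluster knows about
	for (TableName tableName : admin.listTableNames()) {
		System.out.println(tableName.getNameAsString());
	}
} finally {
	admin.close();
}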

Kafka & Java: Secured Consumer Read Record

In this tutorial I will show you how to read records from Kafka over a secured connection. Before you begin you will need Maven/Eclipse set up and a project ready to go. If you haven’t set up Kafka with Kerberos yet, please do so first.

Import SSL Cert to Java:

Follow the “Installing unlimited strength encryption Java libraries” tutorial.

If you are on Windows, do the following:

#Import it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -import -file hadoop.csr -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts" -alias "hadoop"

#Check it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -list -v -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

#If you want to delete it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -delete -alias hadoop -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

POM.xml

<dependency>
	<groupId>org.apache.kafka</groupId>
	<artifactId>kafka-clients</artifactId>
	<version>1.1.0</version>
</dependency>

Imports

import org.apache.kafka.clients.consumer.*;
import java.util.Properties;
import java.io.InputStream;
import java.util.Arrays;

Consumer JAAS Conf (client_jaas.conf)

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useTicketCache=false
    refreshKrb5Config=true
    debug=true
    useKeyTab=true
    storeKey=true
    keyTab="c:\\data\\kafka.service.keytab"
    principal="kafka/hadoop@REALM.CA";
};

Consumer Props File

See the official Kafka documentation for the full list of consumer properties.

bootstrap.servers=hadoop:9094
group.id=test

security.protocol=SASL_SSL
sasl.kerberos.service.name=kafka

#offset will be periodically committed in the background
enable.auto.commit=true

# The deserializer for the key
key.deserializer=org.apache.kafka.common.serialization.StringDeserializer

# The deserializer for the value
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer

# heartbeat to detect worker failures
session.timeout.ms=10000

#Automatically reset offset to earliest offset
auto.offset.reset=earliest

Initiate Kerberos Authentication

System.setProperty("java.security.auth.login.config", "C:\\data\\kafkaconnect\\kafka\\src\\main\\resources\\client_jaas.conf");
System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
System.setProperty("java.security.krb5.conf", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\krb5.conf");
System.setProperty("java.security.krb5.realm", "REALM.CA");
System.setProperty("java.security.krb5.kdc", "REALM.CA");
System.setProperty("sun.security.krb5.debug", "false");
System.setProperty("javax.net.debug", "false");
System.setProperty("javax.net.ssl.keyStorePassword", "changeit");
System.setProperty("javax.net.ssl.keyStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStorePassword", "changeit");
System.setProperty("javax.security.auth.useSubjectCredsOnly", "true");

Consumer Connection/Read

The records we read will just be strings for both key and value.

Consumer<String, String> consumer = null;

try {
	ClassLoader classLoader = getClass().getClassLoader();

	try (InputStream props = classLoader.getResourceAsStream("consumer.props")) {
		Properties properties = new Properties();
		properties.load(props);
		consumer = new KafkaConsumer<>(properties);
	}
	
	System.out.println("Consumer Created");

	// Subscribe to the topic.
	consumer.subscribe(Arrays.asList("testTopic"));

	while (true) {
		final ConsumerRecords<String, String> consumerRecords = consumer.poll(1000);
		
		if (consumerRecords.count() == 0) {
			//Stop once a poll returns no records
			break;
		}

		consumerRecords.forEach(record -> {
			System.out.printf("Consumer Record:(%s, %s, %d, %d)\n", record.key(), record.value(), record.partition(), record.offset());
		});

		//Commit offsets returned on the last poll() for all the subscribed list of topics and partition
		consumer.commitAsync();
	}
} finally {
	if (consumer != null) {
		consumer.close();
	}
}
System.out.println("Consumer Closed");

References

I used kafka-sample-programs as a guide for setting up props.

Kafka & Java: Consumer Seek To Beginning

This is a quick tutorial on how to seek to the beginning using a Kafka consumer. If you haven’t set up the consumer yet, follow the previous tutorial.

Once you have set up the consumer, this is all that is required. It moves the consumer’s offsets for its assigned partitions back to the beginning, so once you start reading you will get all records. Note that consumer.assignment() is only populated once partitions have actually been assigned, for example after a first poll().

consumer.seekToBeginning(consumer.assignment());
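
A minimal sketch of that pattern, reusing the “testTopic” subscription from the consumer tutorial:

// Trigger partition assignment with an initial poll
consumer.subscribe(Arrays.asList("testTopic"));
consumer.poll(0);

// Rewind all assigned partitions to the earliest offset
consumer.seekToBeginning(consumer.assignment());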

Hive & Java: Connect to Remote Kerberos Hive using KeyTab

In this tutorial I will show you how to connect to a remote Kerberos-secured Hive cluster using Java. If you haven’t installed Hive yet, follow the installation tutorial first.

Import SSL Cert to Java:

Follow the “Installing unlimited strength encryption Java libraries” tutorial.

If you are on Windows, do the following:

#Import it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -import -file hadoop.csr -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts" -alias "hadoop"

#Check it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -list -v -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

#If you want to delete it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -delete -alias hadoop -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

POM.xml:

<dependency>
	<groupId>org.apache.hive</groupId>
	<artifactId>hive-jdbc</artifactId>
	<version>2.3.3</version>
	<exclusions>
		<exclusion>
			<groupId>jdk.tools</groupId>
			<artifactId>jdk.tools</artifactId>
		</exclusion>
	</exclusions>
</dependency>

Imports:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;

Connect:

// Setup the configuration object.
final Configuration config = new Configuration();

config.set("fs.defaultFS", "swebhdfs://hadoop:50470");
config.set("hadoop.security.authentication", "kerberos");
config.set("hadoop.rpc.protection", "integrity");

System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
System.setProperty("java.security.krb5.conf", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\krb5.conf");
System.setProperty("java.security.krb5.realm", "REALM.CA");
System.setProperty("java.security.krb5.kdc", "REALM.CA");
System.setProperty("sun.security.krb5.debug", "true");
System.setProperty("javax.net.debug", "all");
System.setProperty("javax.net.ssl.keyStorePassword","changeit");
System.setProperty("javax.net.ssl.keyStore","C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStorePassword","changeit");
System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

UserGroupInformation.setConfiguration(config);
UserGroupInformation.setLoginUser(UserGroupInformation.loginUserFromKeytabAndReturnUGI("hive/hadoop@REALM.CA", "c:\\data\\hive.service.keytab"));

System.out.println(UserGroupInformation.getLoginUser());
System.out.println(UserGroupInformation.getCurrentUser());

//Add the hive driver
Class.forName("org.apache.hive.jdbc.HiveDriver");

//Connect to hive jdbc
Connection connection = DriverManager.getConnection("jdbc:hive2://hadoop:10000/default;principal=hive/hadoop@REALM.CA");
Statement statement = connection.createStatement();

//Create a table
String createTableSql = "CREATE TABLE IF NOT EXISTS "
		+" employee ( eid int, name String, "
		+" salary String, designation String)"
		+" COMMENT 'Employee details'"
		+" ROW FORMAT DELIMITED"
		+" FIELDS TERMINATED BY '\t'"
		+" LINES TERMINATED BY '\n'"
		+" STORED AS TEXTFILE";

System.out.println("Creating Table: " + createTableSql);
statement.executeUpdate(createTableSql);

//Show all the tables to ensure we successfully added the table
String showTablesSql = "show tables";
System.out.println("Show All Tables: " + showTablesSql);
ResultSet res = statement.executeQuery(showTablesSql);

while (res.next()) {
	System.out.println(res.getString(1));
}

//Drop the table
String dropTablesSql = "DROP TABLE IF EXISTS employee";

System.out.println("Dropping Table: " + dropTablesSql);
statement.executeUpdate(dropTablesSql);

System.out.println("Finish!");

Hadoop & Java: Connect to Remote Kerberos HDFS using KeyTab

In this tutorial I will show you how to connect to a remote Kerberos-secured HDFS cluster using Java. If you haven’t installed HDFS with Kerberos yet, follow the installation tutorial first.

Import SSL Cert to Java:

Follow the “Installing unlimited strength encryption Java libraries” tutorial.

If you are on Windows, do the following:

#Import it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -import -file hadoop.csr -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts" -alias "hadoop"

#Check it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -list -v -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

#If you want to delete it
"C:\Program Files\Java\jdk1.8.0_171\bin\keytool" -delete -alias hadoop -keystore "C:\Program Files\Java\jdk1.8.0_171\jre\lib\security\cacerts"

POM.xml:

<dependency>
	<groupId>org.apache.hadoop</groupId>
	<artifactId>hadoop-client</artifactId>
	<version>2.9.1</version>
</dependency>

Imports:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

Connect:

// Setup the configuration object.
final Configuration config = new Configuration();

config.set("fs.defaultFS", "swebhdfs://hadoop:50470");
config.set("hadoop.security.authentication", "kerberos");
config.set("hadoop.rpc.protection", "integrity");

System.setProperty("https.protocols", "TLSv1,TLSv1.1,TLSv1.2");
System.setProperty("java.security.krb5.conf", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\krb5.conf");
System.setProperty("java.security.krb5.realm", "REALM.CA");
System.setProperty("java.security.krb5.kdc", "REALM.CA");
System.setProperty("sun.security.krb5.debug", "true");
System.setProperty("javax.net.debug", "all");
System.setProperty("javax.net.ssl.keyStorePassword","YOURPASSWORD");
System.setProperty("javax.net.ssl.keyStore","C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStore", "C:\\Program Files\\Java\\jdk1.8.0_171\\jre\\lib\\security\\cacerts");
System.setProperty("javax.net.ssl.trustStorePassword","YOURPASSWORD");
System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

UserGroupInformation.setConfiguration(config);
UserGroupInformation.setLoginUser(UserGroupInformation.loginUserFromKeytabAndReturnUGI("myuser/hadoop@REALM.CA", "c:\\data\\myuser.keytab"));

System.out.println(UserGroupInformation.getLoginUser());
System.out.println(UserGroupInformation.getCurrentUser());
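
From here you can use the FileSystem API as the logged-in user; this is what the FileStatus and Path imports above are for. A minimal sketch that lists the root directory:

final FileSystem fileSystem = FileSystem.get(config);

//List the contents of the root directory
for (final FileStatus status : fileSystem.listStatus(new Path("/"))) {
	System.out.println(status.getPath());
}

fileSystem.close();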

Dropwizard: Swagger Integration

This entry is part 5 of 5 in the series Dropwizard

In this tutorial I will show you how to use Swagger in your Maven application. I will also show you how to set up Swagger UI so that when you start your application you can browse the UI generated from your swagger JSON.

POM.xml

Dependencies

<dependency>
	<groupId>io.dropwizard</groupId>
	<artifactId>dropwizard-assets</artifactId>
	<version>1.3.2</version>
</dependency>

<dependency>
	<groupId>io.swagger</groupId>
	<artifactId>swagger-jaxrs</artifactId>
	<version>1.5.19</version>
</dependency>

Plugins

maven-jar-plugin

If you followed the tutorial on creating a basic Dropwizard app then you should already have this plugin. If so, just add the following two configs under the “manifest” section.

<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
<addDefaultSpecificationEntries>true</addDefaultSpecificationEntries>
maven-clean-plugin

Because we are pulling the latest Swagger-UI code on each build we must clean the old build.

<plugin>
	<artifactId>maven-clean-plugin</artifactId>
	<version>3.1.0</version>
	<configuration>
		<filesets>
			<fileset>
				<directory>${basedir}/src/main/resources/swagger-ui</directory>
				<followSymlinks>false</followSymlinks>
			</fileset>
		</filesets>
	</configuration>
</plugin>
download-maven-plugin

We are downloading the latest Swagger-UI code from GitHub. Notice that the “generate-resources” lifecycle phase is used. This is important so the build fetches the code before compilation begins.

<plugin>
	<groupId>com.googlecode.maven-download-plugin</groupId>
	<artifactId>download-maven-plugin</artifactId>
	<version>1.4.0</version>
	<executions>
		<execution>
			<id>swagger-ui</id>
			<phase>generate-resources</phase>
			<goals>
				<goal>wget</goal>
			</goals>
			<configuration>
				<url>
					https://github.com/swagger-api/swagger-ui/archive/master.tar.gz
				</url>
				<unpack>true</unpack>
				<outputDirectory>
					${project.build.directory}
				</outputDirectory>
			</configuration>
		</execution>
	</executions>
</plugin>
replacer

This rewrites the code downloaded from GitHub to point at your swagger.json instead of the petstore example. Again, the “generate-resources” lifecycle phase ensures the replacement happens before compilation begins.

<plugin>
	<groupId>com.google.code.maven-replacer-plugin</groupId>
	<artifactId>replacer</artifactId>
	<version>1.5.3</version>
	<executions>
		<execution>
			<phase>generate-resources</phase>
			<goals>
				<goal>replace</goal>
			</goals>
		</execution>
	</executions>
	<configuration>
		<includes>
			<include>${project.build.directory}/swagger-ui-master/dist/index.html</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui-bundle.js</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui-bundle.js.map</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui-standalone-preset.js</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui-standalone-preset.js.map</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui.js</include>
			<include>${project.build.directory}/swagger-ui-master/dist/swagger-ui.js.map</include>
		</includes>
		<replacements>
			<replacement>
				<token>http://petstore.swagger.io/v2/swagger.json</token>
				<value>/swagger.json</value>
			</replacement>
		</replacements>
	</configuration>
</plugin>
maven-resources-plugin

This copies the content you just downloaded and modified into your resources folder, again during the “generate-resources” lifecycle phase.

<plugin>
	<groupId>org.apache.maven.plugins</groupId>
	<artifactId>maven-resources-plugin</artifactId>
	<version>3.1.0</version>
	<executions>
		<execution>
			<id>copy-resources</id>
			<phase>generate-resources</phase>
			<goals>
				<goal>copy-resources</goal>
			</goals>
			<configuration>
				<outputDirectory>
					${basedir}/src/main/resources/swagger-ui
				</outputDirectory>
				<resources>
					<resource>
						<directory>
							${project.build.directory}/swagger-ui-master/dist
						</directory>
					</resource>
				</resources>
			</configuration>
		</execution>
	</executions>
</plugin>

Now if you run the following command you will see that the swagger-ui is copied to your resources folder.

mvn clean install

MyDropwizardAppApplication

initialize

Now we need to configure our Dropwizard app to host the swagger-ui that we recently downloaded and modified. In our “MyDropwizardAppApplication” class that we created in the initial Dropwizard tutorial we must add the AssetsBundle for our swagger-ui.

@Override
public void initialize(final Bootstrap<MyDropwizardAppConfiguration> bootstrap) {
	bootstrap.addBundle(GuiceBundle.builder().enableAutoConfig(this.getClass().getPackage().getName())
			.modules(new ServerModule()).build());

	// This allows you to host swagger ui on this dropwizard app's host
	final AssetsBundle assetsBundle = new AssetsBundle("/swagger-ui", "/swagger-ui", "index.html");
	bootstrap.addBundle(assetsBundle);
	bootstrap.addCommand(new MyCommand());
}
run

Now we need to set up our Swagger scanners for our api and our models.

@Override
public void run(final MyDropwizardAppConfiguration configuration, final Environment environment) {
	this.initSwagger(configuration, environment);
}

private void initSwagger(MyDropwizardAppConfiguration configuration, Environment environment) {
	// Swagger Resource
	// The ApiListingResource creates the swagger.json file at localhost:8080/swagger.json
	environment.jersey().register(new ApiListingResource());
	environment.jersey().register(SwaggerSerializers.class);

	Package objPackage = this.getClass().getPackage();
	String version = objPackage.getImplementationVersion();

	// Swagger Scanner, which finds all the resources for @Api Annotations
	ScannerFactory.setScanner(new DefaultJaxrsScanner());

	//This is what is shown when you do "http://localhost:8080/swagger-ui/"
	BeanConfig beanConfig = new BeanConfig();
	beanConfig.setVersion(version);
	beanConfig.setSchemes(new String[] { "http" });
	beanConfig.setHost("localhost:8080");
	beanConfig.setPrettyPrint(true);
	beanConfig.setDescription("The drpowizard apis");
	beanConfig.setResourcePackage("ca.gaudreault.mydropwizardapp");
	beanConfig.setScan(true);
}

Now if we run our app we can go to http://localhost:8080/swagger-ui/ and see our content, but since we haven’t annotated any model or api yet we won’t see much of anything. So recall the previous tutorials on Dropwizard Guice and Dropwizard Resource; we will update those classes now.

Model

If you compare this to the model from the Guice tutorial there are only a few differences. Notice we import the Swagger annotations, add the “ApiModel” annotation to the class and the “ApiModelProperty” annotation to the “value” field, and mark the field “NotNull”.

package ca.gaudreault.mydropwizardapp.models;

import java.io.Serializable;

import javax.validation.constraints.NotNull;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

@ApiModel(description = "My Example Model.")
public class MyModel implements Serializable {
	private static final long serialVersionUID = 1L;
	@NotNull
	@ApiModelProperty(required = true, notes = "My value")
	private Integer value;
	
	public Integer getValue() {
		return value;
	}
	public void setValue(Integer value) {
		this.value = value;
	}
}

Resource

If you compare this to the resource from the Guice tutorial there are only a few differences. Notice our class now has “@SwaggerDefinition” and “@Api” defined. This helps Swagger-UI group your endpoints together using the tags. Also notice how our “runTest” endpoint now has “@Path”, “@ApiResponses” and “@ApiOperation”.

package ca.gaudreault.mydropwizardapp.resources;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.eclipse.jetty.http.HttpStatus;

import com.codahale.metrics.annotation.Timed;
import com.google.inject.Inject;

import ca.gaudreault.mydropwizardapp.models.MyModel;
import ca.gaudreault.mydropwizardapp.services.MyService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.SwaggerDefinition;
import io.swagger.annotations.Tag;

@SwaggerDefinition(tags = { @Tag(name = "MyResource", description = "My Example Resource") })
@Api(value = "MyResource")
@Timed
@Path("/my-resource")
public class MyResource {
	private MyService myService;

	@Inject
	public MyResource(final MyService myService) {
		this.myService = myService;
	}

	@GET
	@Path("/runTest")
	@ApiOperation(value = "Run test and returns myModel", notes = "Run test and returns myModel", response = MyModel.class, tags = {
			"MyResource" })
	@ApiResponses(value = {
			@ApiResponse(code = HttpStatus.OK_200, message = "Successfully Tested", response = MyModel.class) })
	@Timed
	@Produces(MediaType.APPLICATION_JSON)
	public MyModel runTest() {
		return this.myService.runTest();
	}
}

Run our Project

If we run our project and hit the rest endpoint http://localhost:8080/my-resource/runTest we will get back the below, which shows our endpoint is still working as expected.

{"value":123123}

Checking Swagger-UI

Now that the project is running we can check what was generated. Go to http://localhost:8080/swagger-ui/ and you will see the below. You are now well on your way to using Swagger.

Model Expanded

Resource Expanded

References

The following helped me build this tutorial.

  • https://robferguson.org/blog/2016/12/11/resteasy-embedded-jetty-fat-jars-swagger-and-swagger-ui/
  • https://itazuramono.com/2015/12/07/automatic-swagger-documentation-for-dropwizard-using-maven/
  • http://mikelynchgames.com/software-development/adding-swagger-to-your-dropwizard-application/

Java: JUnit 4 w/ PowerMock

In this tutorial I will show you how to use JUnit 4 with PowerMock for mocking static classes in your application. If you have not already done so, follow the JUnit 4 tutorial first.

POM.xml

<dependency>
	<groupId>org.mockito</groupId>
	<artifactId>mockito-core</artifactId>
	<version>2.18.3</version>
	<scope>test</scope>
</dependency>
<dependency>
	<groupId>org.assertj</groupId>
	<artifactId>assertj-core</artifactId>
	<version>3.10.0</version>
	<scope>test</scope>
</dependency>
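
You will also need the PowerMock runner and its Mockito bridge on the test classpath. The coordinates below are the standard ones; the version is an assumption, so pick one compatible with your Mockito version.

<dependency>
	<groupId>org.powermock</groupId>
	<artifactId>powermock-module-junit4</artifactId>
	<version>2.0.2</version>
	<scope>test</scope>
</dependency>
<dependency>
	<groupId>org.powermock</groupId>
	<artifactId>powermock-api-mockito2</artifactId>
	<version>2.0.2</version>
	<scope>test</scope>
</dependency>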

Static Class

We will create this class to use for our static testing.

public final class MyStaticTest {
	public static String getString() {
		return "test";
	}
}

Imports

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

Test Class

Now we can run our test with PowerMock and mock our static class’s methods, as you can see below.

@RunWith(PowerMockRunner.class)
@PrepareForTest({ MyStaticTest.class })
public class AppTestStatic {
	@Before
	public void setup() {
		MockitoAnnotations.initMocks(this);
		PowerMockito.mockStatic(MyStaticTest.class);
	}

	@Test
	public void myTest() {
		when(MyStaticTest.getString()).thenReturn("myTest");

		final String returnString = MyStaticTest.getString();

		assertThat(returnString).isEqualTo("myTest");
	}
}

Java: JUnit 4 Example

In this tutorial I will show you how to use JUnit 4 in your application. In the next tutorial I will use this class with PowerMock.

Build Path

Ensure you have added JUnit 4 to your build path and that you are using Java 8.

Imports

import org.junit.Before;
import org.junit.Test;

Test Class

package ca.gaudreault.mytestapp;

import org.junit.Before;
import org.junit.Test;

public class AppTest {
	@Before
	public void setup() {
	}

	@Test
	public void myTest() {
	}
}

This was a very basic example of writing a unit test with JUnit 4. In a future example I will build on this with PowerMock.

Dropwizard: Resource

This entry is part 4 of 5 in the series Dropwizard

In this tutorial I will give a basic example of a resource endpoint. If you haven’t configured Guice yet, please do so before continuing.

Now that you have Guice configured and working you can create an api endpoint. For this we will just use a GET, but you can also do POST, PUT and DELETE.

package ca.gaudreault.mydropwizardapp.resources;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import com.codahale.metrics.annotation.Timed;
import com.google.inject.Inject;

import ca.gaudraeult.mydropwizardapp.services.MyService;
import ca.gaudreault.mydropwizardapp.models.MyModel;

@Timed
@Path("/my-resource")
public class MyResource {
	MyService myService;

	@Inject
	public MyResource(final MyService myService) {
		this.myService = myService;
	}

	@GET
	@Timed
	@Produces(MediaType.APPLICATION_JSON)
	public MyModel runTest() {
		return this.myService.runTest();
	}
}

Once you run your application you can view the endpoint by going to http://localhost:8080/my-resource.

The output will be as follows.

{"value":123123}

Notice that we added the “@Timed” annotation. You can now go to http://localhost:8081/metrics?pretty=true to view the metrics for our “runTest” method. The output will look like the below.

{
	"ca.gaudreault.mydropwizardapp.resources.MyResource.runTest": {
		"count": 0,
		"max": 0.0,
		"mean": 0.0,
		"min": 0.0,
		"p50": 0.0,
		"p75": 0.0,
		"p95": 0.0,
		"p98": 0.0,
		"p99": 0.0,
		"p999": 0.0,
		"stddev": 0.0,
		"m15_rate": 0.0,
		"m1_rate": 0.0,
		"m5_rate": 0.0,
		"mean_rate": 0.0,
		"duration_units": "seconds",
		"rate_units": "calls/second"
	}
}

Dropwizard: Command

This entry is part 3 of 5 in the series Dropwizard

In this tutorial I will give a brief demonstration on how to write a custom dropwizard command.

MyCommand

Below you will see the command class and how we create and register a command line param called “test”, which is a Boolean.

package ca.gaudreault.mydropwizardapp;

import io.dropwizard.cli.Command;
import io.dropwizard.setup.Bootstrap;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;

public class MyCommand extends Command {

	protected MyCommand() {
		super("myCommand", "This is a sample command");
	}

	@Override
	public void configure(Subparser subparser) {
	    subparser.addArgument("-test").required(true).type(Boolean.class).dest("test").help("Does something really awesome");
	}

	@Override
	public void run(Bootstrap<?> bootstrap, Namespace namespace) throws Exception {
		System.out.println("MyCommand " + namespace.getBoolean("test"));
	}
}

MyDropwizardAppApplication

If you remember from part 1 of this series you created the base Dropwizard app, so you should have a class called “MyDropwizardAppApplication”. Open it now and modify “initialize” as below. Note that we are only adding the “addCommand” call.

@Override
public void initialize(final Bootstrap<MyDropwizardAppConfiguration> bootstrap) {
	bootstrap.addCommand(new MyCommand());
}

Executing Command

Now we can just run our JAR file and pass the following arguments to it.

myCommand -test false

Once it runs you will see the following:

MyCommand false

Dropwizard: Guice Bundle

This entry is part 2 of 5 in the series Dropwizard

In this tutorial I will show you how to add Guice to your Dropwizard app. This will be a very basic implementation. One thing you should note is that I didn’t put in any Javadoc comments. You should always do that!

Now there are a few Dropwizard Guice integrations available, but the most active is the one I will show you today, called “dropwizard-guicey”.

POM.xml

<dependency>
	<groupId>ru.vyarus</groupId>
	<artifactId>dropwizard-guicey</artifactId>
	<version>4.1.0</version>
</dependency>

Model

Now we create a model to use with our service.

package ca.gaudreault.mydropwizardapp.models;

import java.io.Serializable;

public class MyModel implements Serializable {
	private static final long serialVersionUID = 1L;
	private Integer value;
	
	public Integer getValue() {
		return value;
	}
	public void setValue(Integer value) {
		this.value = value;
	}
}

Service

Here you will create your service interface and class so that you can bind it in the guice module.

Interface

package ca.gaudraeult.mydropwizardapp.services;

import ca.gaudreault.mydropwizardapp.models.MyModel;

public interface MyService {
	MyModel runTest();
}

Implementation

package ca.gaudraeult.mydropwizardapp.services;

import ca.gaudreault.mydropwizardapp.models.MyModel;

public class MyServiceImpl implements MyService {
	
	public MyServiceImpl() { }

	@Override
	public MyModel runTest() {
		final MyModel myModel = new MyModel();
		myModel.setValue(123123);
		return myModel;
	}
}

ServerModule

Now, when we create our module class, we bind the interface to the implementation. Note that if your implementation does not implement the interface this will not work.

package ca.gaudreault.mydropwizardapp;

import com.google.inject.AbstractModule;

import ca.gaudraeult.mydropwizardapp.services.MyService;
import ca.gaudraeult.mydropwizardapp.services.MyServiceImpl;

public class ServerModule extends AbstractModule  {

	@Override
	protected void configure() {
		bind(MyService.class).to(MyServiceImpl.class);
	}
}

Dropwizard Application

If you remember from part 1 of this series you created the base Dropwizard app, so you should have a class called “MyDropwizardAppApplication”. Open it now and modify “initialize” as below. Basically we are registering our ServerModule class with Dropwizard.

@Override
public void initialize(final Bootstrap<MyDropwizardAppConfiguration> bootstrap) {
	bootstrap.addBundle(GuiceBundle.builder()
		.enableAutoConfig(this.getClass().getPackage().getName())
		.modules(new ServerModule())
		.build());
}

And that is it: you have configured a very basic Dropwizard Guice setup.

Eclipse/Maven: Jacoco Integration

This tutorial will guide you through configuring Jacoco in your Maven application and installing the Eclipse plugin.

First, open the Eclipse Marketplace and search for “EclEmma”.

Next, click Install and accept the license agreement (after reading it first, of course). Once the installation completes, restart Eclipse.

Once Eclipse opens again you can edit “Code Coverage” from “Window/Preferences”.

You can now run “Code Coverage” through Eclipse by right clicking your project. As you can see below I have not written any unit tests yet :(.

Pom.xml

Build

<build>
	<plugins>
		<plugin>
			<groupId>org.jacoco</groupId>
			<artifactId>jacoco-maven-plugin</artifactId>
			<version>0.8.1</version>
			<configuration>
				<!-- Path to the output file for execution data. (Used in the initialize phase) -->
				<destFile>${project.build.directory}/coverage-reports/jacoco-unit.exec</destFile>
				<!-- File with execution data. (Used in the package phase) -->
				<dataFile>${project.build.directory}/coverage-reports/jacoco-unit.exec</dataFile>
				<excludes>
				</excludes>
			</configuration>
			<executions>
				<execution>
					<id>jacoco-initialization</id>
					<phase>initialize</phase>
					<goals>
						<!-- https://www.eclemma.org/jacoco/trunk/doc/prepare-agent-mojo.html -->
						<goal>prepare-agent</goal>
					</goals>
				</execution>
				<execution>
					<id>jacoco-site</id>
					<phase>package</phase>
					<goals>
						<!-- https://www.eclemma.org/jacoco/trunk/doc/report-mojo.html -->
						<goal>report</goal>
					</goals>
				</execution>
			</executions>
		</plugin>
	</plugins>
</build>
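
Maven Commands

With this configuration the agent attaches during the initialize phase and the report is generated during the package phase, so a normal build produces the coverage report (by default under target/site/jacoco).

mvn clean package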

Eclipse/Maven: FindBugs/SpotBugs Integration

This tutorial will guide you through configuring FindBugs/SpotBugs in your Maven application and installing the Eclipse plugin.

First, open the Eclipse Marketplace and search for “SpotBugs”.

Next, click Install and accept the license agreement (after reading it first, of course). Once the installation completes, restart Eclipse.

Once Eclipse opens again, right-click the project(s) you want to activate FindBugs/SpotBugs for and click “Properties”. Click “SpotBugs” and adjust the settings as needed.

Now you can run SpotBugs by right clicking your project and selecting SpotBugs then “Find Bugs”.

Pom.xml

Reporting

<reporting>
	<plugins>
		<plugin>
			<groupId>com.github.spotbugs</groupId>
			<artifactId>spotbugs-maven-plugin</artifactId>
			<version>3.1.3</version>
		</plugin>
	</plugins>
</reporting>

Build

<build>
	<plugins>
		<plugin>
			<groupId>com.github.spotbugs</groupId>
			<artifactId>spotbugs-maven-plugin</artifactId>
			<version>3.1.3</version>
			<dependencies>
				<dependency>
					<groupId>com.github.spotbugs</groupId>
					<artifactId>spotbugs</artifactId>
					<version>3.1.3</version>
				</dependency>
			</dependencies>
			<configuration>
				<effort>Max</effort>
				<threshold>Low</threshold>
				<failOnError>true</failOnError>
				<plugins>
					<plugin>
						<groupId>com.h3xstream.findsecbugs</groupId>
						<artifactId>findsecbugs-plugin</artifactId>
						<version>LATEST</version>
					</plugin>
				</plugins>
			</configuration>
		</plugin>
	</plugins>
</build>

Maven Commands

mvn spotbugs:spotbugs

#Generates the report site
mvn site

Eclipse/Maven: PMD Integration

This tutorial will guide you through configuring PMD in your Maven application and installing the Eclipse plugin.

First, open the Eclipse Marketplace and search for “PMD”.

Next, click Install and accept the license agreement (after reading it first, of course). Once the installation completes, restart Eclipse.

Once Eclipse opens again, right-click the project(s) you want to activate PMD for and click “Properties”. Click “PMD” and then click “Enable PMD for this project”. You will need to create a rule set; see the PMD documentation on how to build one.

Pom.xml

Reporting

You will need both reporting plugins in your project. “maven-jxr-plugin” fixes an issue where the cross-reference (xref) cannot be found.

<reporting>
	<plugins>
		<plugin>
			<groupId>org.apache.maven.plugins</groupId>
			<artifactId>maven-pmd-plugin</artifactId>
			<version>3.9.0</version>
		</plugin>
		<plugin>
			<groupId>org.apache.maven.plugins</groupId>
			<artifactId>maven-jxr-plugin</artifactId>
			<version>2.5</version>
		</plugin>
	</plugins>
</reporting>

Build

You will need to configure the following to use with “mvn pmd:???” commands.

<build>
	<plugins>
		<plugin>
			<groupId>org.apache.maven.plugins</groupId>
			<artifactId>maven-pmd-plugin</artifactId>
			<version>3.9.0</version>
			<configuration>
				<failOnViolation>true</failOnViolation>
				<verbose>true</verbose>
				<targetJdk>1.8</targetJdk>
				<includeTests>false</includeTests>
				<excludes>
				</excludes>
				<excludeRoots>
					<excludeRoot>target/generated-sources/stubs</excludeRoot>
				</excludeRoots>
			</configuration>
			<executions>
				<execution>
					<phase>test</phase>
					<goals>
						<goal>pmd</goal>
						<goal>cpd</goal>
						<goal>cpd-check</goal>
						<goal>check</goal>
					</goals>
				</execution>
			</executions>
		</plugin>
	</plugins>
</build>

Maven Commands

mvn pmd:check
mvn pmd:pmd

#cpd checks for copy paste issues

mvn pmd:cpd-check
mvn pmd:cpd

#Generates the report site
mvn site

Eclipse/Maven: CheckStyle Integration

This tutorial will guide you through configuring CheckStyle in your Maven application and installing the Eclipse plugin.

First, open the Eclipse Marketplace and search for “Checkstyle”.

Next, click Install and accept the license agreement (after reading it first, of course). Once the installation completes, restart Eclipse.

Once Eclipse opens again, right-click the project(s) you want to activate CheckStyle for and activate it. There are also properties you can configure through Eclipse’s preferences; I suggest you review them. You can also customize the CheckStyle rules or write your own. Up to you.

Pom.xml

Build

When you run “mvn checkstyle:check” if will then run and will fail the build if you have any issues.

<build>
	<plugins>
		<plugin>
			<groupId>org.apache.maven.plugins</groupId>
			<artifactId>maven-checkstyle-plugin</artifactId>
			<version>3.0.0</version>
			<executions>
				<execution>
					<id>validate</id>
					<phase>validate</phase>
					<configuration>
						<encoding>UTF-8</encoding>
						<consoleOutput>true</consoleOutput>
						<failsOnError>true</failsOnError>
						<linkXRef>false</linkXRef>
					</configuration>
					<goals>
						<goal>check</goal>
					</goals>
				</execution>
			</executions>
		</plugin>
	</plugins>
</build>

Reporting

You can generate an HTML report with the following by running “mvn checkstyle:checkstyle”.

<reporting>
	<plugins>
		<plugin>
			<groupId>org.apache.maven.plugins</groupId>
			<artifactId>maven-checkstyle-plugin</artifactId>
			<version>3.0.0</version>
			<reportSets>
				<reportSet>
					<reports>
						<report>checkstyle</report>
					</reports>
				</reportSet>
			</reportSets>
		</plugin>
	</plugins>
</reporting>
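
Maven Commands

For reference, the two goals used above:

#Fails the build on violations
mvn checkstyle:check

#Generates the HTML report
mvn checkstyle:checkstyle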

Java: Command Line Arguments Parsing

This tutorial will guide you through doing command line argument parsing easily. For this example we will use the Commons CLI package.

pom.xml

<properties>
	<commonscli.version>1.4</commonscli.version>
</properties>

<dependencies>
	<!-- https://mvnrepository.com/artifact/commons-cli/commons-cli -->
	<dependency>
		<groupId>commons-cli</groupId>
		<artifactId>commons-cli</artifactId>
		<version>${commonscli.version}</version>
	</dependency>
</dependencies>

Imports

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

Main

public static void main(String[] args) {
	final Options options = new Options();
	Option startOption = new Option("s", "start", true, "Start the process.");
	startOption.setRequired(true);
	options.addOption(startOption);

	final HelpFormatter help = new HelpFormatter();
	final CommandLineParser parser = new DefaultParser();
	CommandLine cmd = null;

	try {
		cmd = parser.parse(options, args);
	} catch (final ParseException e) {
		help.printHelp("java -jar myApp.jar", "My Header", options, "-s must be specified");
		return;
	}

	final boolean doStart = Boolean.valueOf(cmd.getOptionValue("s"));

	if (doStart) {
		//Do my work here
	}
}
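
For example, running the JAR with the flag set (“myApp.jar” is just the name used in the help formatter above):

java -jar myApp.jar -s true

Omitting -s prints the help text and returns.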

Avro & Java: Record Parsing

This tutorial will guide you through how to convert JSON to Avro and then back to JSON. I suggest you first read through the documentation on Avro to familiarize yourself with it. This tutorial assumes you have a Maven project already set up and a resources folder.

POM:

Add Avro Dependency

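A minimal Avro dependency looks like the following; the version here is an assumption, so use whichever release matches your project.

<dependency>
	<groupId>org.apache.avro</groupId>
	<artifactId>avro</artifactId>
	<version>1.8.2</version>
</dependency>
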
Add Jackson Dependency
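
Again, the version is an assumption; any recent jackson-databind release works.

<dependency>
	<groupId>com.fasterxml.jackson.core</groupId>
	<artifactId>jackson-databind</artifactId>
	<version>2.9.5</version>
</dependency>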

Avro Schema File:

Next you need to create the avro schema file in your resources folder. Name the file “schema.avsc”. The extension avsc is the Avro schema extension.

{
    "namespace": "test.avro",
    "type": "record",
    "name": "MY_NAME",
    "fields": [
        {"name": "name_1", "type": "int"},
        {"name": "name_2", "type": {"type": "array", "items": "float"}},
        {"name": "name_3", "type": "float"}
    ]
}

Json Record to Validate:

Next you need to create a json file that conforms to the schema you just made. Name the file “record.json” and put it in your resources folder. The contents can be whatever you want as long as they conform to your schema above.

{ "name_1": 234, "name_2": [23.34,654.98], "name_3": 234.7}

It’s Avro Time:

Imports:

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

Conversion to Avro and Back:

private void run() throws IOException {
	//Get the schema and json record from resources
	final ClassLoader loader = getClass().getClassLoader();
	final File schemaFile = new File(loader.getResource("schema.avsc").getFile());
	final InputStream record = loader.getResourceAsStream("record.json");
	
	//Create avro schema
	final Schema schema = new Schema.Parser().parse(schemaFile);

	//Encode to avro
	final byte[] avro = encodeToAvro(schema, record);

	//Decode back to json
	final JsonNode node = decodeToJson(schema, avro);

	System.out.println(node);
	System.out.println("done");
}

/**
 * Encode json to avro
 * 
 * @param schema the schema the avro pertains to
 * @param record the data to convert to avro
 * @return the avro bytes
 * @throws IOException if decoding fails
 */
private byte[] encodeToAvro(Schema schema, InputStream record) throws IOException {
	final DatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
	final DataInputStream din = new DataInputStream(record);
	final Decoder decoder = new DecoderFactory().jsonDecoder(schema, din);
	final Object datum = reader.read(null, decoder);
	final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
	final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
	final Encoder encoder = new EncoderFactory().binaryEncoder(outputStream, null);
	writer.write(datum, encoder);
	encoder.flush();

	return outputStream.toByteArray();
}

/**
 * Decode avro back to json.
 * 
 * @param schema the schema the avro pertains to
 * @param avro the avro bytes
 * @return the json
 * @throws IOException if jackson fails
 */
private JsonNode decodeToJson(Schema schema, byte[] avro) throws IOException {
	final ObjectMapper mapper = new ObjectMapper();
	final DatumReader<GenericData.Record> reader = new GenericDatumReader<>(schema);
	final Decoder decoder = new DecoderFactory().binaryDecoder(avro, null);
	final JsonNode node = mapper.readTree(reader.read(null, decoder).toString());

	return node;
}