Use Universql with Java applications via JDBC.
Connect to Universql from Java applications using Snowflake's JDBC driver, providing seamless integration with Java-based applications and frameworks.
Add the dependency to your Maven pom.xml:
<!-- Snowflake JDBC driver — also used to connect to Universql endpoints. -->
<dependency>
    <groupId>net.snowflake</groupId>
    <artifactId>snowflake-jdbc</artifactId>
    <version>3.13.33</version>
</dependency>
Or for Gradle (build.gradle):
implementation 'net.snowflake:snowflake-jdbc:3.13.33'
Basic JDBC usage example:
import java.sql.*;
/**
 * Minimal JDBC example: connects to a Universql server through the Snowflake
 * JDBC driver, sets the session context, and prints monthly signup stats.
 */
public class SnowflakeExample {
    public static void main(String[] args) {
        String url = "jdbc:snowflake://{universql_server}"; // Universql server
        String user = "your_username";
        String password = "your_password";

        // try-with-resources guarantees the connection is closed on every path.
        try (Connection conn = DriverManager.getConnection(url, user, password)) {
            // Set session context (warehouse/database/schema) before querying.
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("USE WAREHOUSE compute_wh");
                stmt.execute("USE DATABASE mydb");
                stmt.execute("USE SCHEMA myschema");
            }

            // Aggregate query: one row per month, newest first, last 12 months.
            String sql = """
                    SELECT
                    DATE_TRUNC('month', created_at) as month,
                    COUNT(*) as user_count,
                    COUNT(DISTINCT country) as countries
                    FROM users
                    GROUP BY 1
                    ORDER BY 1 DESC
                    LIMIT 12
                    """;
            try (PreparedStatement pstmt = conn.prepareStatement(sql);
                 ResultSet rs = pstmt.executeQuery()) {
                while (rs.next()) {
                    System.out.printf("Month: %s, Users: %d, Countries: %d%n",
                            rs.getDate("month"),
                            rs.getLong("user_count"),
                            rs.getLong("countries"));
                }
            }
        } catch (SQLException e) {
            // Report the full SQL error context instead of printStackTrace().
            System.err.printf("Query failed [code=%d, state=%s]: %s%n",
                    e.getErrorCode(), e.getSQLState(), e.getMessage());
        }
    }
}
Implement connection pooling using HikariCP:
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
/**
 * Application-wide HikariCP pool for Universql/Snowflake connections.
 * Call {@link #initPool()} once at startup before requesting connections.
 */
public class ConnectionPool {
    private static HikariDataSource dataSource;

    /**
     * Initializes the pool. Thread-safe and idempotent: a second call is a
     * no-op, so a duplicate init can no longer leak an extra pool.
     */
    public static synchronized void initPool() {
        if (dataSource != null) {
            return; // already initialized
        }
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:snowflake://{universql_server}"); // Universql server
        config.setUsername("username");
        config.setPassword("password");

        // Snowflake-specific defaults applied to every pooled connection.
        config.addDataSourceProperty("warehouse", "compute_wh");
        config.addDataSourceProperty("db", "mydb");
        config.addDataSourceProperty("schema", "myschema");

        // Pool sizing and timeouts (milliseconds).
        config.setMaximumPoolSize(10);
        config.setMinimumIdle(5);
        config.setIdleTimeout(300000);
        config.setConnectionTimeout(20000);

        dataSource = new HikariDataSource(config);
    }

    /**
     * Borrows a connection from the pool.
     *
     * @throws SQLException if the pool is exhausted or the connection fails
     * @throws IllegalStateException if {@link #initPool()} was never called
     */
    public static Connection getConnection() throws SQLException {
        if (dataSource == null) {
            throw new IllegalStateException("ConnectionPool.initPool() has not been called");
        }
        return dataSource.getConnection();
    }
}
Configure Snowflake with Spring Boot:
/**
 * Spring configuration wiring a HikariCP-backed DataSource (bound from the
 * spring.datasource.hikari property tree) and a JdbcTemplate on top of it.
 */
@Configuration
public class SnowflakeConfig {

    // Field values are populated from "spring.datasource.hikari.*" properties.
    @Bean
    @ConfigurationProperties("spring.datasource.hikari")
    public HikariConfig hikariConfig() {
        return new HikariConfig();
    }

    @Bean
    public DataSource dataSource() {
        HikariConfig config = hikariConfig();
        return new HikariDataSource(config);
    }

    // Injects the DataSource bean declared above.
    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
Application properties (application.yml):
spring:
  datasource:
    hikari:
      jdbc-url: jdbc:snowflake://{universql_server} # Universql server
      username: ${SNOWFLAKE_USER}       # injected from the environment
      password: ${SNOWFLAKE_PASSWORD}   # injected from the environment
      driver-class-name: net.snowflake.client.jdbc.SnowflakeDriver
      maximum-pool-size: 10
      data-source-properties:           # passed through to the Snowflake driver
        warehouse: compute_wh
        db: mydb
        schema: myschema
Using with Spring Repository:
/**
 * Data-access layer for the users table, built on Spring's JdbcTemplate.
 */
@Repository
public class UserRepository {

    private final JdbcTemplate jdbcTemplate;

    public UserRepository(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** Returns every active user, newest first. */
    public List<User> findActiveUsers() {
        String query = """
                SELECT id, name, email, created_at
                FROM users
                WHERE active = true
                ORDER BY created_at DESC
                """;
        return jdbcTemplate.query(query, (row, rowNum) -> toUser(row));
    }

    /** Inserts a new user record, always marked active. */
    public void createUser(User user) {
        String insert = """
                INSERT INTO users (name, email, active)
                VALUES (?, ?, ?)
                """;
        jdbcTemplate.update(insert, user.getName(), user.getEmail(), true);
    }

    // Maps one result-set row onto a User value object.
    private static User toUser(ResultSet row) throws SQLException {
        return new User(
                row.getLong("id"),
                row.getString("name"),
                row.getString("email"),
                row.getTimestamp("created_at").toLocalDateTime());
    }
}
Perform batch operations efficiently:
// Batch-insert products inside one transaction. On failure the partial batch
// is rolled back, and auto-commit is restored before the connection goes back
// to the pool (the original example left the connection in manual-commit mode
// and committed nothing after a mid-batch failure).
String sql = "INSERT INTO products (name, price, category) VALUES (?, ?, ?)";
try (Connection conn = dataSource.getConnection();
     PreparedStatement pstmt = conn.prepareStatement(sql)) {
    conn.setAutoCommit(false); // group the whole batch into one transaction
    try {
        for (Product product : products) {
            pstmt.setString(1, product.getName());
            pstmt.setBigDecimal(2, product.getPrice());
            pstmt.setString(3, product.getCategory());
            pstmt.addBatch();
        }
        int[] results = pstmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        conn.rollback(); // undo any partially applied batch
        throw e;
    } finally {
        conn.setAutoCommit(true); // restore default mode for pool reuse
    }
}
Handle Snowflake-specific errors:
import net.snowflake.client.jdbc.SnowflakeSQLException;
// Catch the driver-specific subtype first to access Snowflake error details,
// then fall back to generic SQLException handling. `stmt` is assumed to be an
// open Statement from the earlier examples.
try {
    // Execute query
    stmt.executeQuery("SELECT * FROM non_existent_table");
} catch (SnowflakeSQLException e) {
    System.err.println("Snowflake Error Code: " + e.getErrorCode());
    System.err.println("SQL State: " + e.getSQLState());
    System.err.println("Message: " + e.getMessage());
    // Handle specific error codes
    // NOTE(review): codes below are Snowflake-specific — verify against the
    // Snowflake error-code reference for your driver version.
    switch (e.getErrorCode()) {
        case 2003:
            System.err.println("Table not found");
            break;
        case 90100:
            System.err.println("Invalid warehouse");
            break;
        default:
            System.err.println("Unknown error");
    }
} catch (SQLException e) {
    // SnowflakeSQLException extends SQLException, so this branch only sees
    // non-Snowflake driver errors (e.g. connection-level failures).
    System.err.println("Generic SQL error: " + e.getMessage());
}
Use Connection Pooling
// Reuse pooled connections instead of opening one per query; cap the pool
// so a traffic spike cannot exhaust server-side connection limits.
String url = "jdbc:snowflake://{universql_server}"; // Universql server
config.setJdbcUrl(url);
config.setMaximumPoolSize(10);
Prepare Statements
// Bind values with ? placeholders — never concatenate user input into SQL.
// The ResultSet now sits in its own try-with-resources so it is closed
// explicitly, matching the resource-handling pattern shown below (the
// original relied on the statement's close() to cascade).
String sql = "SELECT * FROM users WHERE status = ?";
try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
    pstmt.setString(1, "active");
    try (ResultSet rs = pstmt.executeQuery()) {
        // Process results
    }
}
Handle Resources Properly
// Declare every JDBC resource in try-with-resources; they are closed in
// reverse order (ResultSet, then PreparedStatement, then Connection) on
// both success and failure.
try (Connection conn = dataSource.getConnection();
     PreparedStatement pstmt = conn.prepareStatement(sql);
     ResultSet rs = pstmt.executeQuery()) {
    // Process results
}
Use Transactions When Needed
// Run several statements atomically: commit on success, roll back on failure.
// A finally block restores auto-commit, which the original omitted — leaving
// it off hands a manual-commit connection back to the pool for the next user.
conn.setAutoCommit(false);
try {
    // Execute statements
    conn.commit();
} catch (SQLException e) {
    conn.rollback(); // revert all work done in this transaction
    throw e;
} finally {
    conn.setAutoCommit(true); // restore the default mode
}