From ff4d99d0636fa99869effdab5cf0dfe5b8bf9235 Mon Sep 17 00:00:00 2001 From: Julian Reschke Date: Thu, 2 Oct 2025 14:55:27 +0100 Subject: [PATCH 1/2] OAK-11967: RDBDocumentStore: code cleanup --- .../plugins/document/rdb/RDBBlobStore.java | 73 +++---- .../plugins/document/rdb/RDBBlobStoreDB.java | 7 +- .../rdb/RDBCommonVendorSpecificCode.java | 12 +- .../document/rdb/RDBConnectionHandler.java | 19 +- .../document/rdb/RDBDataSourceFactory.java | 17 +- .../document/rdb/RDBDocumentSerializer.java | 12 +- .../document/rdb/RDBDocumentStore.java | 203 ++++++++---------- .../document/rdb/RDBDocumentStoreDB.java | 30 +-- .../document/rdb/RDBDocumentStoreJDBC.java | 133 +++++------- .../oak/plugins/document/rdb/RDBExport.java | 30 +-- .../oak/plugins/document/rdb/RDBHelper.java | 2 +- .../plugins/document/rdb/RDBJDBCTools.java | 27 ++- .../plugins/document/rdb/RDBJSONSupport.java | 36 ++-- .../document/rdb/RDBMissingLastRevSeeker.java | 8 +- .../oak/plugins/document/rdb/RDBRow.java | 15 +- .../document/rdb/RDBVersionGCSupport.java | 112 +++++----- .../plugins/document/rdb/package-info.java | 6 +- .../rdb/RDBDocumentStoreJDBCTest.java | 5 +- 18 files changed, 319 insertions(+), 428 deletions(-) diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java index 7c19c44edae..33d16a02190 100644 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java @@ -93,9 +93,9 @@ public RDBBlobStore(@NotNull DataSource ds) { @Override public void close() { - String dropped = ""; + StringBuilder dropped = new StringBuilder(); if (!this.tablesToBeDropped.isEmpty()) { - LOG.debug("attempting to drop: " + this.tablesToBeDropped); + LOG.debug("attempting to drop: {}", this.tablesToBeDropped); for (String tname : this.tablesToBeDropped) { Connection con = null; try { @@ -106,28 +106,26 @@ public void close() { stmt.execute("drop table " + tname); stmt.close(); con.commit(); - dropped += tname + " "; + dropped.append(tname).append(" "); } catch (SQLException ex) { - LOG.debug("attempting to drop: " + tname, ex); + LOG.debug("attempting to drop: {}", tname, ex); } finally { closeStatement(stmt); } } catch (SQLException ex) { - LOG.debug("attempting to drop: " + tname, ex); + LOG.debug("attempting to drop: {}", tname, ex); } finally { this.ch.closeConnection(con); } } - dropped = dropped.trim(); + dropped = new StringBuilder(dropped.toString().trim()); } this.ch.close(); - LOG.info("RDBBlobStore (" + getModuleVersion() + ") closed" - + (dropped.isEmpty() ? "" : " (tables dropped: " + dropped + ")")); + LOG.info("RDBBlobStore ({}) closed{}", getModuleVersion(), (dropped.length() == 0) ? "" : " (tables dropped: " + dropped + ")"); } - @SuppressWarnings("deprecation") @Override protected void finalize() throws Throwable { if (!this.ch.isClosed() && this.callStack != null) { @@ -160,12 +158,12 @@ protected void finalize() throws Throwable { // from options protected String tnData; protected String tnMeta; - private Set tablesToBeDropped = new HashSet(); + private final Set tablesToBeDropped = new HashSet<>(); private boolean readOnly; private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBOptions options) throws Exception { - this.readOnly = builder == null ? 
false : builder.getReadOnlyMode(); + this.readOnly = builder != null && builder.getReadOnlyMode(); this.tnData = RDBJDBCTools.createTableName(options.getTablePrefix(), "DATASTORE_DATA"); this.tnMeta = RDBJDBCTools.createTableName(options.getTablePrefix(), "DATASTORE_META"); @@ -176,10 +174,7 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO int isolation = con.getTransactionIsolation(); String isolationDiags = RDBJDBCTools.isolationLevelToString(isolation); if (isolation != Connection.TRANSACTION_READ_COMMITTED) { - LOG.info("Detected transaction isolation level " + isolationDiags + " is " - + (isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher") + " than expected " - + RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED) - + " - check datasource configuration"); + LOG.info("Detected transaction isolation level {} is {} than expected {} - check datasource configuration", isolationDiags, isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher", RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED)); } DatabaseMetaData md = con.getMetaData(); @@ -195,8 +190,8 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO md.getDriverMinorVersion()).replaceAll("[\r\n\t]", " ").trim(); String dbUrl = md.getURL(); - List tablesCreated = new ArrayList(); - List tablesPresent = new ArrayList(); + List tablesCreated = new ArrayList<>(); + List tablesPresent = new ArrayList<>(); Map tableInfo = new HashMap<>(); Statement createStatement = null; @@ -223,7 +218,7 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO // table does not appear to exist con.rollback(); - LOG.debug("trying to read from '" + tableName + "'", ex); + LOG.debug("trying to read from '{}'", tableName, ex); if (this.readOnly) { throw new SQLException("Would like to create table '" + tableName + "', but RDBBlobStore has been initialized in 'readonly' mode"); @@ -231,13 +226,13 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO createStatement = con.createStatement(); + String ct; if (this.tnMeta.equals(tableName)) { - String ct = db.getMetaTableCreationStatement(tableName); - createStatement.execute(ct); + ct = db.getMetaTableCreationStatement(tableName); } else { - String ct = db.getDataTableCreationStatement(tableName); - createStatement.execute(ct); + ct = db.getDataTableCreationStatement(tableName); } + createStatement.execute(ct); createStatement.close(); createStatement = null; @@ -265,15 +260,12 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO Map diag = db.getAdditionalDiagnostics(this.ch, this.tnData); - LOG.info("RDBBlobStore (" + getModuleVersion() + ") instantiated for database " + dbDesc + ", using driver: " - + driverDesc + ", connecting to: " + dbUrl + (diag.isEmpty() ? "" : (", properties: " + diag.toString())) - + ", transaction isolation level: " + isolationDiags + ", " + tableInfo); + LOG.info("RDBBlobStore ({}) instantiated for database {}, using driver: {}, connecting to: {}{}, transaction isolation level: {}, {}", getModuleVersion(), dbDesc, driverDesc, dbUrl, diag.isEmpty() ? 
"" : (", properties: " + diag), isolationDiags, tableInfo); if (!tablesPresent.isEmpty()) { - LOG.info("Tables present upon startup: " + tablesPresent); + LOG.info("Tables present upon startup: {}", tablesPresent); } if (!tablesCreated.isEmpty()) { - LOG.info("Tables created upon startup: " + tablesCreated - + (options.isDropTablesOnClose() ? " (will be dropped on exit)" : "")); + LOG.info("Tables created upon startup: {}{}", tablesCreated, options.isDropTablesOnClose() ? " (will be dropped on exit)" : ""); } String moreDiags = db.evaluateDiagnostics(diag); @@ -341,7 +333,7 @@ private void storeBlockInDatabase(byte[] digest, int level, byte[] data) throws } } catch (SQLException ex) { this.ch.rollbackConnection(con); - // the insert failed although it should have succeeded; see whether the blob already exists + // the insert failed, although it should have succeeded; see whether the blob already exists prep = con.prepareStatement("select DATA from " + this.tnData + " where ID = ?"); ResultSet rs = null; byte[] dbdata = null; @@ -370,7 +362,7 @@ else if (!Arrays.equals(data, dbdata)) { } else { // just recover - LOG.info("recovered from DB inconsistency for id " + id + ": meta record was missing (impact will be minor performance degradation)"); + LOG.info("recovered from DB inconsistency for id {}: meta record was missing (impact will be minor performance degradation)", id); } } try { @@ -389,7 +381,7 @@ else if (!Arrays.equals(data, dbdata)) { } } catch (SQLException e) { // already exists - ok - LOG.debug("inserting meta record for id " + id, e); + LOG.debug("inserting meta record for id {}", id, e); } } } finally { @@ -510,7 +502,7 @@ private int sweepFromDatabase() throws SQLException { prepCheck = con.prepareStatement("select ID from " + this.tnMeta + " where LASTMOD < ?"); prepCheck.setLong(1, minLastModified); rs = prepCheck.executeQuery(); - ArrayList ids = new ArrayList(); + ArrayList ids = new ArrayList<>(); while (rs.next()) { ids.add(rs.getString(1)); } @@ -569,8 +561,7 @@ public long countDeleteChunks(List chunkIds, long maxLastModifiedTime) t metaStatement.append(" and LASTMOD <= ?"); // delete if there is NO entry where the last modified of // the meta is YOUNGER than x - dataStatement.append(" and not exists(select * from " + this.tnMeta + " where " + this.tnMeta + ".ID = " - + this.tnData + ".ID and LASTMOD > ?)"); + dataStatement.append(" and not exists(select * from ").append(this.tnMeta).append(" where ").append(this.tnMeta).append(".ID = ").append(this.tnData).append(".ID and LASTMOD > ?)"); } prepMeta = con.prepareStatement(metaStatement.toString()); @@ -619,12 +610,12 @@ public Iterator getAllChunkIds(long maxLastModifiedTime) throws Exceptio */ private static class ChunkIdIterator extends AbstractIterator { - private long maxLastModifiedTime; - private RDBConnectionHandler ch; - private static int BATCHSIZE = 1024 * 64; - private List results = new LinkedList(); + private final long maxLastModifiedTime; + private final RDBConnectionHandler ch; + private static final int BATCHSIZE = 1024 * 64; + private final List results = new LinkedList<>(); private String lastId = null; - private String metaTable; + private final String metaTable; public ChunkIdIterator(RDBConnectionHandler ch, long maxLastModifiedTime, String metaTable) { this.maxLastModifiedTime = maxLastModifiedTime; @@ -647,8 +638,8 @@ protected String computeNext() { } private boolean refill() { - StringBuffer query = new StringBuffer(); - query.append("select ID from " + metaTable); + StringBuilder query = 
new StringBuilder(); + query.append("select ID from ").append(metaTable); if (maxLastModifiedTime > 0) { query.append(" where LASTMOD <= ?"); if (lastId != null) { diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreDB.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreDB.java index 930e1ed7edf..096954d3fa7 100644 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreDB.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStoreDB.java @@ -152,12 +152,11 @@ public String getMetaTableCreationStatement(String tableName) { protected RDBCommonVendorSpecificCode vendorCode = RDBCommonVendorSpecificCode.DEFAULT; - private RDBBlobStoreDB(String description) { + RDBBlobStoreDB(String description) { this.description = description; - this.vendorCode = RDBCommonVendorSpecificCode.DEFAULT; } - private RDBBlobStoreDB(String description, RDBCommonVendorSpecificCode vendorCode) { + RDBBlobStoreDB(String description, RDBCommonVendorSpecificCode vendorCode) { this.description = description; this.vendorCode = vendorCode; } @@ -187,7 +186,7 @@ public static RDBBlobStoreDB getValue(String desc) { } } - LOG.error("DB type " + desc + " unknown, trying default settings"); + LOG.error("DB type {} unknown, trying default settings", desc); DEFAULT.description = desc + " - using default settings"; return DEFAULT; } diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBCommonVendorSpecificCode.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBCommonVendorSpecificCode.java index cafe169cb20..b3b4573f2ef 100755 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBCommonVendorSpecificCode.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBCommonVendorSpecificCode.java @@ -39,7 +39,7 @@ public enum RDBCommonVendorSpecificCode { DB2() { @Override - public Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { + public @NotNull Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { Connection con = null; PreparedStatement stmt = null; ResultSet rs = null; @@ -85,7 +85,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str MSSQL() { @Override - public Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { + public @NotNull Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { Connection con = null; PreparedStatement stmt = null; ResultSet rs = null; @@ -116,7 +116,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str MYSQL() { @Override - public Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { + public @NotNull Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { Connection con = null; PreparedStatement stmt = null; ResultSet rs = null; @@ -153,7 +153,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str ORACLE() { @Override - public Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { + public @NotNull Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { Connection con = null; Statement stmt = null; ResultSet rs = null; @@ -181,7 +181,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str POSTGRES() { @Override - public Map 
getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { + public @NotNull Map getAdditionalDiagnostics(RDBConnectionHandler ch, String tableName) { Connection con = null; PreparedStatement stmt = null; ResultSet rs = null; @@ -212,7 +212,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str protected String description; - private RDBCommonVendorSpecificCode() { + RDBCommonVendorSpecificCode() { } @NotNull diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBConnectionHandler.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBConnectionHandler.java index 419f73d459b..98f2e897896 100644 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBConnectionHandler.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBConnectionHandler.java @@ -46,7 +46,7 @@ public class RDBConnectionHandler implements Closeable { * This becomes a problem when the pool implemented by the {@link DataSource} re-uses the connection, and may * affect subsequent users of that connection. This system property allows to enable a check to be done upon * {@link #closeConnection(Connection)} so that problems can be detected early rather than late. - * See also https://issues.apache.org/jira/browse/OAK-2337. + * See also .... */ private static final boolean CHECKCONNECTIONONCLOSE = SystemPropertySupplier .create("org.apache.jackrabbit.oak.plugins.document.rdb.RDBConnectionHandler.CHECKCONNECTIONONCLOSE", Boolean.FALSE) @@ -155,7 +155,7 @@ private Connection getConnection() throws IllegalStateException, SQLException { if (LOG.isDebugEnabled()) { long elapsed = System.currentTimeMillis() - ts; if (elapsed >= 20) { - LOG.debug("Obtaining a new connection from " + this.ds + " took " + elapsed + "ms", new Exception("call stack")); + LOG.debug("Obtaining a new connection from {} took {}ms", this.ds, elapsed, new Exception("call stack")); } } return c; @@ -176,8 +176,7 @@ private void setReadOnly(Connection c, boolean ro) throws SQLException { c.setReadOnly(true); this.setReadOnlyThrows = Boolean.FALSE; } catch (SQLException ex) { - LOG.error("Connection class " + c.getClass() - + " erroneously throws SQLException on setReadOnly(true); not trying again"); + LOG.error("Connection class {} erroneously throws SQLException on setReadOnly(true); not trying again", c.getClass()); this.setReadOnlyThrows = Boolean.TRUE; } } else if (!this.setReadOnlyThrows) { @@ -190,8 +189,7 @@ private void setReadOnly(Connection c, boolean ro) throws SQLException { c.setReadOnly(false); this.setReadWriteThrows = Boolean.FALSE; } catch (SQLException ex) { - LOG.error("Connection class " + c.getClass() - + " erroneously throws SQLException on setReadOnly(false); not trying again"); + LOG.error("Connection class {} erroneously throws SQLException on setReadOnly(false); not trying again", c.getClass()); this.setReadWriteThrows = Boolean.TRUE; } } else if (!this.setReadWriteThrows) { @@ -222,7 +220,7 @@ public String dump(long now) { } // map holding references to currently open connections - private ConcurrentMap, ConnectionHolder> connectionMap = new ConcurrentHashMap<>(); + private final ConcurrentMap, ConnectionHolder> connectionMap = new ConcurrentHashMap<>(); // time in millis for a connection in the map to be logged as "old"; note // that this is meant to catch both connection leaks and long-running @@ -255,8 +253,7 @@ private void dumpConnectionMap(long ts) { } } if 
(cnt > 0) { - LOG.trace(cnt + " connections with age >= " + LOGTHRESHOLD + "ms active while obtaining new connection: " - + sb.toString()); + LOG.trace("{} connections with age >= " + LOGTHRESHOLD + "ms active while obtaining new connection: {}", cnt, sb); } } } @@ -264,7 +261,7 @@ private void dumpConnectionMap(long ts) { private void remember(Connection c) { if (LOG.isTraceEnabled()) { - connectionMap.put(new WeakReference(c), new ConnectionHolder()); + connectionMap.put(new WeakReference<>(c), new ConnectionHolder()); } } @@ -288,7 +285,7 @@ private static String getCaller(StackTraceElement[] elements) { } sb.append('.').append(e.getMethodName()).append('(').append(loc).append(')'); } else { - sb.append(e.toString()); + sb.append(e); } prevClass = e.getClassName(); } diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java index eaaa60f589c..d8d475c9ce2 100644 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java @@ -22,7 +22,6 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.sql.Connection; -import java.sql.Driver; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; @@ -62,18 +61,18 @@ public static DataSource forJdbcUrl(String url, String username, String passwd) } catch (Exception ex) { String message = "trying to create datasource " + classname; LOG.debug(message, ex); - LOG.info(message + " (" + ex.getMessage() + ")"); + LOG.info("{} ({})", message, ex.getMessage()); throw new DocumentStoreException(message, ex); } } /** - * A {@link Closeable} {@link DataSource} based on a generic {@link Source} + * A {@link Closeable} {@link DataSource} based on a generic {@link DataSource} * . 
*/ private static class CloseableDataSource implements DataSource, Closeable { - private DataSource ds; + private final DataSource ds; public CloseableDataSource(DataSource ds) { this.ds = ds; @@ -116,13 +115,9 @@ public void close() throws IOException { try { Method clmethod = dsclazz.getMethod("close"); clmethod.invoke(ds); - } catch (NoSuchMethodException e) { - LOG.debug("Class " + dsclazz + " does not have close() method"); - } catch (IllegalArgumentException e) { - LOG.debug("Class " + dsclazz + " does not have close() method"); - } catch (InvocationTargetException e) { - throw new IOException("trying to close datasource", e); - } catch (IllegalAccessException e) { + } catch (NoSuchMethodException | IllegalArgumentException e) { + LOG.debug("Class {} does not have close() method", dsclazz); + } catch (InvocationTargetException | IllegalAccessException e) { throw new IOException("trying to close datasource", e); } } diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializer.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializer.java index 1c70ee1abac..1561e9f661b 100644 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializer.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentSerializer.java @@ -125,7 +125,7 @@ public String asString(UpdateOp update, Set columnProperties) { } else if (op.type == UpdateOp.Operation.Type.REMOVE || op.type == UpdateOp.Operation.Type.REMOVE_MAP_ENTRY) { sb.append("\"*\","); } else { - throw new DocumentStoreException("Can't serialize " + update.toString() + " for JSON append"); + throw new DocumentStoreException("Can't serialize " + update + " for JSON append"); } appendJsonString(sb, key.getName()); sb.append(","); @@ -162,10 +162,10 @@ public T fromRow(@NotNull Collection collection, @NotNul doc.put(CMODCOUNT, row.getCollisionsModcount()); } if (row.hasBinaryProperties() != null) { - doc.put(HASBINARY, row.hasBinaryProperties().longValue()); + doc.put(HASBINARY, row.hasBinaryProperties()); } if (row.deletedOnce() != null) { - doc.put(DELETEDONCE, row.deletedOnce().booleanValue()); + doc.put(DELETEDONCE, row.deletedOnce()); } if (row.getSchemaVersion() >= 2) { if (row.getSdType() != RDBRow.LONG_UNSET) { @@ -240,7 +240,7 @@ public T fromRow(@NotNull Collection collection, @NotNul return doc; } catch (Exception ex) { String message = String.format("Error processing persisted data for document '%s'", row.getId()); - if (charData.length() > 0) { + if (!charData.isEmpty()) { int last = charData.charAt(charData.length() - 1); if (last != '}' && last != '"' && last != ']') { message += " (DATA column might be truncated)"; @@ -272,7 +272,7 @@ private void applyUpdate(T doc, List> updateSt @SuppressWarnings("unchecked") Map m = (Map) old; if (m == null) { - m = new TreeMap(comparator); + m = new TreeMap<>(comparator); doc.put(key, m); } m.put(rev, value); @@ -340,7 +340,7 @@ private static void checkSdType(Document doc) { // low level operations - private static byte[] GZIPSIG = { 31, -117 }; + private static final byte[] GZIPSIG = { 31, -117 }; private static String fromBlobData(byte[] bdata) { try { diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java index 301c127dd35..a98ec2def0f 100755 
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java @@ -50,7 +50,6 @@ import java.util.Objects; import java.util.Set; import java.util.TreeMap; -import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -334,7 +333,7 @@ public int remove(Collection collection, Map int remove(Collection collection, String indexedProperty, long startValue, long endValue) throws DocumentStoreException { try { - List conditions = new ArrayList(); + List conditions = new ArrayList<>(); conditions.add(new QueryCondition(indexedProperty, ">", startValue)); conditions.add(new QueryCondition(indexedProperty, "<", endValue)); return deleteWithCondition(collection, conditions); @@ -367,7 +366,7 @@ public List createOrUpdate(Collection collection, Lis // fall back to sequential mode if batches are turned off using system // property, or the number of update operations is small if (!BATCHUPDATES || updateOps.size() < MINIMALBULKUPDATESIZE) { - List results = new ArrayList(updateOps.size()); + List results = new ArrayList<>(updateOps.size()); for (UpdateOp update : updateOps) { results.add(createOrUpdate(collection, update)); } @@ -377,13 +376,13 @@ public List createOrUpdate(Collection collection, Lis } } - private static int MINIMALBULKUPDATESIZE = 3; + private static final int MINIMALBULKUPDATESIZE = 3; private List internalCreateOrUpdate(Collection collection, List updateOps) { final Stopwatch watch = startWatch(); - Map results = new LinkedHashMap(); - Map operationsToCover = new LinkedHashMap(); - Set duplicates = new HashSet(); + Map results = new LinkedHashMap<>(); + Map operationsToCover = new LinkedHashMap<>(); + Set duplicates = new HashSet<>(); for (UpdateOp updateOp : updateOps) { UpdateUtils.assertUnconditional(updateOp); @@ -398,7 +397,7 @@ private List internalCreateOrUpdate(Collection collec } } - Map oldDocs = new HashMap(); + Map oldDocs = new HashMap<>(); if (collection == Collection.NODES) { oldDocs.putAll(readDocumentCached(collection, operationsToCover.keySet())); } @@ -446,7 +445,7 @@ private List internalCreateOrUpdate(Collection collec } private Map readDocumentCached(Collection collection, Set keys) { - Map documents = new HashMap(); + Map documents = new HashMap<>(); if (collection == Collection.NODES) { for (String key : keys) { @@ -472,7 +471,7 @@ private Map readDocumentCached(Collection col } private Map readDocumentsUncached(Collection collection, Set keys) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection connection = null; RDBTableMetaData tmd = getTable(collection); @@ -514,7 +513,7 @@ private CacheChangesTracker obtainTracker(Collection col } private Map bulkUpdate(Collection collection, List updates, Map oldDocs, boolean upsert) { - Set missingDocs = new HashSet(); + Set missingDocs = new HashSet<>(); for (UpdateOp op : updates) { if (!oldDocs.containsKey(op.getId())) { missingDocs.add(op.getId()); @@ -532,8 +531,8 @@ private Map bulkUpdate(Collection collectio oldDocs.putAll(freshDocs); try (CacheChangesTracker tracker = obtainTracker(collection, SetUtils.union(oldDocs.keySet(), missingDocs) )) { - List docsToUpdate = new ArrayList(updates.size()); - Set keysToUpdate = new HashSet(); + List docsToUpdate = new ArrayList<>(updates.size()); + Set keysToUpdate = new HashSet<>(); for 
(UpdateOp update : updates) { String id = update.getId(); T modifiedDoc = collection.newDocument(this); @@ -573,7 +572,7 @@ private Map bulkUpdate(Collection collectio nodesCache.putNonConflictingDocs(tracker, docsToCache); } - Map result = new HashMap(); + Map result = new HashMap<>(); for (UpdateOp op : updates) { if (successfulUpdates.contains(op.getId())) { result.put(op, oldDocs.get(op.getId())); @@ -659,16 +658,11 @@ public String getDroppedTables() { } // table names - private static Map, String> TABLEMAP; - private static List TABLENAMES; + private static final Map, String> TABLEMAP; + private static final List TABLENAMES; static { - Map, String> tmp = new HashMap, String>(); - tmp.put(Collection.CLUSTER_NODES, "CLUSTERNODES"); - tmp.put(Collection.JOURNAL, "JOURNAL"); - tmp.put(Collection.NODES, "NODES"); - tmp.put(Collection.SETTINGS, "SETTINGS"); - TABLEMAP = Collections.unmodifiableMap(tmp); - List tl = new ArrayList(TABLEMAP.values()); + TABLEMAP = Map.of(Collection.CLUSTER_NODES, "CLUSTERNODES", Collection.JOURNAL, "JOURNAL", Collection.NODES, "NODES", Collection.SETTINGS, "SETTINGS"); + List tl = new ArrayList<>(TABLEMAP.values()); Collections.sort(tl); TABLENAMES = Collections.unmodifiableList(tl); } @@ -692,7 +686,7 @@ static class RDBTableMetaData { private int dataLimitInOctets = 16384; private String schemaInfo = ""; private String indexInfo = ""; - private Set columnOnlyProperties = Collections.unmodifiableSet(COLUMNPROPERTIES); + private final Set columnOnlyProperties = Collections.unmodifiableSet(COLUMNPROPERTIES); private Set columnProperties = Collections.unmodifiableSet(COLUMNPROPERTIES); public RDBTableMetaData(@Nullable String catalog, @NotNull String name) { @@ -774,13 +768,13 @@ public void setIndexInfo(String indexInfo) { } } - private final Map, RDBTableMetaData> tableMeta = new HashMap, RDBTableMetaData>(); + private final Map, RDBTableMetaData> tableMeta = new HashMap<>(); @Override public void dispose() { if (!this.tablesToBeDropped.isEmpty()) { - String dropped = ""; - LOG.debug("attempting to drop: " + this.tablesToBeDropped); + StringBuilder dropped = new StringBuilder(); + LOG.debug("attempting to drop: {}", this.tablesToBeDropped); for (String tname : this.tablesToBeDropped) { Connection con = null; try { @@ -791,19 +785,19 @@ public void dispose() { stmt.execute("drop table " + tname); stmt.close(); con.commit(); - dropped += tname + " "; + dropped.append(tname).append(" "); } catch (SQLException ex) { - LOG.debug("attempting to drop: " + tname, ex); + LOG.debug("attempting to drop: {}", tname, ex); } finally { closeStatement(stmt); } } catch (SQLException ex) { - LOG.debug("attempting to drop: " + tname, ex); + LOG.debug("attempting to drop: {}", tname, ex); } finally { this.ch.closeConnection(con); } } - this.droppedTables = dropped.trim(); + this.droppedTables = dropped.toString().trim(); } this.ch.close(); @@ -813,8 +807,7 @@ public void dispose() { } catch (IOException ex) { LOG.warn("Error occurred while closing nodes cache", ex); } - LOG.info("RDBDocumentStore (" + getModuleVersion() + ") disposed" + getCnStats() - + (this.droppedTables.isEmpty() ? "" : " (tables dropped: " + this.droppedTables + ")")); + LOG.info("RDBDocumentStore ({}) disposed{}{}", getModuleVersion(), getCnStats(), this.droppedTables.isEmpty() ? 
"" : " (tables dropped: " + this.droppedTables + ")"); } @Override @@ -878,7 +871,7 @@ public Map getStats() { Map builder = new HashMap<>(); tableMeta.forEach((k, v) -> toMapBuilder(builder, k, v)); if (LOG.isDebugEnabled()) { - LOG.debug("statistics obtained: " + builder.toString()); + LOG.debug("statistics obtained: {}", builder); } return Collections.unmodifiableMap(builder); } @@ -899,7 +892,7 @@ private void toMapBuilder(Map builder, Coll builder.put(prefix + ".count", Long.toString(c)); } catch (DocumentStoreException ex) { - LOG.debug("getting entry count for " + prefix, ex); + LOG.debug("getting entry count for {}", prefix, ex); } } @@ -922,7 +915,7 @@ private void toMapBuilder(Map builder, Coll private RDBConnectionHandler ch; // from options - private Set tablesToBeDropped = new HashSet(); + private final Set tablesToBeDropped = new HashSet<>(); // ratio between Java characters and UTF-8 encoding // a) single characters will fit into 3 bytes @@ -959,24 +952,22 @@ private void toMapBuilder(Map builder, Coll public static String VERSIONPROP = "__version"; // set of supported indexed properties - private static final Set INDEXEDPROPERTIES = new HashSet(Arrays.asList(new String[] { MODIFIED, - NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, NodeDocument.SD_TYPE, NodeDocument.SD_MAX_REV_TIME_IN_SECS, VERSIONPROP })); + private static final Set INDEXEDPROPERTIES = new HashSet<>(Arrays.asList(MODIFIED, + NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, NodeDocument.SD_TYPE, NodeDocument.SD_MAX_REV_TIME_IN_SECS, VERSIONPROP)); // set of required table columns - private static final Set REQUIREDCOLUMNS = Collections.unmodifiableSet(new HashSet(Arrays.asList( - new String[] { "id", "dsize", "deletedonce", "bdata", "data", "cmodcount", "modcount", "hasbinary", "modified" }))); + private static final Set REQUIREDCOLUMNS = Set.of("id", "dsize", "deletedonce", "bdata", "data", "cmodcount", "modcount", "hasbinary", "modified"); // set of optional table columns - private static final Set OPTIONALCOLUMNS = Collections - .unmodifiableSet(new HashSet(Arrays.asList(new String[] { "version", "sdtype", "sdmaxrevtime" }))); + private static final Set OPTIONALCOLUMNS = Set.of("version", "sdtype", "sdmaxrevtime"); // set of properties not serialized to JSON - private static final Set COLUMNPROPERTIES = new HashSet(Arrays.asList( - new String[] { ID, NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT })); + private static final Set COLUMNPROPERTIES = new HashSet<>(Arrays.asList( + ID, NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT)); // set of properties not serialized to JSON, schema version 2 - private static final Set COLUMNPROPERTIES2 = new HashSet(Arrays.asList( - new String[] { ID, NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT, - NodeDocument.SD_TYPE, NodeDocument.SD_MAX_REV_TIME_IN_SECS, VERSIONPROP })); + private static final Set COLUMNPROPERTIES2 = new HashSet<>(Arrays.asList( + ID, NodeDocument.HAS_BINARY_FLAG, NodeDocument.DELETED_ONCE, COLLISIONSMODCOUNT, MODIFIED, MODCOUNT, + NodeDocument.SD_TYPE, NodeDocument.SD_MAX_REV_TIME_IN_SECS, VERSIONPROP)); private final RDBDocumentSerializer ser = new RDBDocumentSerializer(this); @@ -1012,10 +1003,7 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO int isolation = con.getTransactionIsolation(); String isolationDiags = 
RDBJDBCTools.isolationLevelToString(isolation); if (isolation != Connection.TRANSACTION_READ_COMMITTED) { - LOG.info("Detected transaction isolation level " + isolationDiags + " is " - + (isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher") + " than expected " - + RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED) - + " - check datasource configuration"); + LOG.info("Detected transaction isolation level {} is {} than expected {} - check datasource configuration", isolationDiags, isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher", RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED)); } String dbDesc = String.format("%s %s (%d.%d)", md.getDatabaseProductName(), md.getDatabaseProductVersion(), @@ -1037,7 +1025,7 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO LOG.error(versionDiags); } - if (! "".equals(dbInfo.getInitializationStatement())) { + if (!dbInfo.getInitializationStatement().isEmpty()) { Statement stmt = null; try { stmt = con.createStatement(); @@ -1050,8 +1038,8 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO } } - List tablesCreated = new ArrayList(); - List tablesPresent = new ArrayList(); + List tablesCreated = new ArrayList<>(); + List tablesPresent = new ArrayList<>(); try { createTableFor(con, Collection.CLUSTER_NODES, this.tableMeta.get(Collection.CLUSTER_NODES), tablesCreated, tablesPresent, options.getInitialSchema(), options.getUpgradeToSchema()); @@ -1083,15 +1071,12 @@ private void initialize(DataSource ds, DocumentNodeStoreBuilder builder, RDBO Map diag = dbInfo.getAdditionalDiagnostics(this.ch, this.tableMeta.get(Collection.NODES).getName()); - LOG.info("RDBDocumentStore (" + getModuleVersion() + ") instantiated for database " + dbDesc + ", using driver: " - + driverDesc + ", connecting to: " + dbUrl + (diag.isEmpty() ? "" : (", properties: " + diag.toString())) - + ", transaction isolation level: " + isolationDiags + tableDiags); + LOG.info("RDBDocumentStore ({}) instantiated for database {}, using driver: {}, connecting to: {}{}, transaction isolation level: {}{}", getModuleVersion(), dbDesc, driverDesc, dbUrl, diag.isEmpty() ? "" : (", properties: " + diag), isolationDiags, tableDiags); if (!tablesPresent.isEmpty()) { - LOG.info("Tables present upon startup: " + tablesPresent); + LOG.info("Tables present upon startup: {}", tablesPresent); } if (!tablesCreated.isEmpty()) { - LOG.info("Tables created upon startup: " + tablesCreated - + (options.isDropTablesOnClose() ? " (will be dropped on exit)" : "")); + LOG.info("Tables created upon startup: {}{}", tablesCreated, options.isDropTablesOnClose() ? 
" (will be dropped on exit)" : ""); } } @@ -1243,8 +1228,7 @@ private static Map getIndexInformation(ResultSet rs, S info.columns = columns; // skip indices on tables in other schemas in case we have that information if (rmetSchemaName.isEmpty() || inSchema.isEmpty() || rmetSchemaName.equals(inSchema)) { - String tname = asQualifiedDbName(inSchema, rs.getString("TABLE_NAME")); - info.tname = tname; + info.tname = asQualifiedDbName(inSchema, rs.getString("TABLE_NAME")); String cname = rs.getString("COLUMN_NAME"); if (cname != null) { columns.add(cname.toUpperCase(Locale.ENGLISH)); @@ -1292,8 +1276,8 @@ private void createTableFor(Connection con, Collection col, obtainFlagsFromResultSetMeta(met, tmd); // check that all required columns are present - Set requiredColumns = new HashSet(REQUIREDCOLUMNS); - Set unknownColumns = new HashSet(); + Set requiredColumns = new HashSet<>(REQUIREDCOLUMNS); + Set unknownColumns = new HashSet<>(); boolean hasVersionColumn = false; boolean hasSDTypeColumn = false; for (int i = 1; i <= met.getColumnCount(); i++) { @@ -1313,20 +1297,20 @@ private void createTableFor(Connection con, Collection col, if (!requiredColumns.isEmpty()) { String message = String.format("Table %s: the following required columns are missing: %s", tableName, - requiredColumns.toString()); + requiredColumns); LOG.error(message); throw new DocumentStoreException(message); } if (!unknownColumns.isEmpty()) { String message = String.format("Table %s: the following columns are unknown and will not be maintained: %s", - tableName, unknownColumns.toString()); + tableName, unknownColumns); LOG.info(message); } String tableInfo = RDBJDBCTools.dumpResultSetMeta(met); tmd.setSchemaInfo(tableInfo); - Set indexOn = new HashSet(); + Set indexOn = new HashSet<>(); String indexInfo = dumpIndexData(con.getMetaData(), met, tableName, indexOn); tmd.setIndexInfo(indexInfo); @@ -1359,7 +1343,7 @@ private void createTableFor(Connection con, Collection col, // table does not appear to exist con.rollback(); - LOG.debug("trying to read from '" + tableName + "'", ex); + LOG.debug("trying to read from '{}'", tableName, ex); if (this.readOnly) { throw new SQLException("Would like to create table '" + tableName + "', but RDBDocumentStore has been initialized in 'readonly' mode"); @@ -1391,7 +1375,7 @@ private void createTableFor(Connection con, Collection col, getTableMetaData(con, col, tmd); } catch (SQLException ex2) { - LOG.error("Failed to create table '" + tableName + "' in '" + dbname + "'", ex2); + LOG.error("Failed to create table '{}' in '{}'", tableName, dbname, ex2); throw ex2; } } @@ -1412,7 +1396,7 @@ private boolean upgradeTable(Connection con, String tableName, int level) throws upgradeStatement.execute(statement); upgradeStatement.close(); con.commit(); - LOG.info("Upgraded " + tableName + " to DB level " + level + " using '" + statement + "'"); + LOG.info("Upgraded {} to DB level {} using '{}'", tableName, level, statement); wasChanged = true; } catch (SQLException exup) { con.rollback(); @@ -1442,7 +1426,7 @@ private boolean addModifiedIndex(Connection con, String tableName) throws SQLExc upgradeStatement.execute(statement); upgradeStatement.close(); con.commit(); - LOG.info("Added 'modified' index to " + tableName + " using '" + statement + "'"); + LOG.info("Added 'modified' index to {} using '{}'", tableName, statement); wasChanged = true; } catch (SQLException exup) { con.rollback(); @@ -1460,7 +1444,7 @@ private boolean addModifiedIndex(Connection con, String tableName) throws SQLExc return 
wasChanged; } - + private static void getTableMetaData(Connection con, Collection col, RDBTableMetaData tmd) throws SQLException { Statement checkStatement = null; ResultSet checkResultSet = null; @@ -1521,15 +1505,12 @@ private T readDocumentCached(final Collection collection doc = null; } final NodeDocument cachedDoc = doc; - doc = nodesCache.get(id, new Callable() { - @Override - public NodeDocument call() throws Exception { - NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id, cachedDoc); - if (doc != null) { - doc.seal(); - } - return wrap(doc); + doc = nodesCache.get(id, () -> { + NodeDocument doc1 = (NodeDocument) readDocumentUncached(collection, id, cachedDoc); + if (doc1 != null) { + doc1.seal(); } + return wrap(doc1); }); // inspect the doc whether it can be used long lastCheckTime = doc.getLastCheckTime(); @@ -1555,16 +1536,15 @@ public NodeDocument call() throws Exception { } } - @Nullable private boolean internalCreate(Collection collection, List updates) { final Stopwatch watch = startWatch(); - List ids = new ArrayList(updates.size()); + List ids = new ArrayList<>(updates.size()); boolean success = true; try { // try up to CHUNKSIZE ops in one transaction for (List chunks : ListUtils.partitionList(updates, CHUNKSIZE)) { - List docs = new ArrayList(); + List docs = new ArrayList<>(); for (UpdateOp update : chunks) { ids.add(update.getId()); maintainUpdateStats(collection, update.getId()); @@ -1630,7 +1610,7 @@ private T internalCreateOrUpdate(Collection collection, oldDoc = readDocumentUncached(collection, update.getId(), null); if (oldDoc == null) { // something else went wrong - LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex); + LOG.error("insert failed, but document {} is not present, aborting", update.getId(), ex); throw (ex); } return internalUpdate(collection, update, oldDoc, checkConditions, retries); @@ -1642,7 +1622,7 @@ private T internalCreateOrUpdate(Collection collection, result = internalCreateOrUpdate(collection, update, allowCreate, checkConditions, retries - 1); } else { - LOG.error("update of " + update.getId() + " failed, race condition?"); + LOG.error("update of {} failed, race condition?", update.getId()); throw new DocumentStoreException("update of " + update.getId() + " failed, race condition?", null, DocumentStoreException.Type.TRANSIENT); } @@ -1694,7 +1674,7 @@ private T internalUpdate(Collection collection, UpdateOp if (oldDoc == null) { // document was there but is now gone - LOG.debug("failed to apply update because document is gone in the meantime: " + update.getId(), new Exception("call stack")); + LOG.debug("failed to apply update because document is gone in the meantime: {}", update.getId(), new Exception("call stack")); return null; } @@ -1772,7 +1752,7 @@ private List internalQuery(Collection collection, Str connection.commit(); int size = dbresult.size(); - List result = new ArrayList(size); + List result = new ArrayList<>(size); for (int i = 0; i < size; i++) { // free RDBRow as early as possible RDBRow row = dbresult.set(i, null); @@ -1823,7 +1803,7 @@ private List internalQuery(Collection collection, Str } } - private static interface MyCloseableIterable extends Closeable, Iterable { + private interface MyCloseableIterable extends Closeable, Iterable { } protected Iterable queryAsIterable(final Collection collection, String fromKey, String toKey, @@ -1843,9 +1823,9 @@ protected Iterable queryAsIterable(final Collection c final String from = collection == Collection.NODES && 
NodeDocument.MIN_ID_VALUE.equals(fromKey) ? null : fromKey; final String to = collection == Collection.NODES && NodeDocument.MAX_ID_VALUE.equals(toKey) ? null : toKey; - return new MyCloseableIterable() { + return new MyCloseableIterable<>() { - Set> returned = new HashSet<>(); + final Set> returned = new HashSet<>(); @Override public Iterator iterator() { @@ -2086,7 +2066,7 @@ private boolean updateDocument(@NotNull Collection colle if (!success && shouldRetry) { data = ser.asString(document, tmd.getColumnOnlyProperties()); Object m = document.get(MODIFIED); - long modified = (m instanceof Long) ? ((Long)m).longValue() : 0; + long modified = (m instanceof Long) ? (Long) m : 0; success = db.update(connection, tmd, document.getId(), modified, hasBinary, deletedOnce, modcount, cmodcount, oldmodcount, data); connection.commit(); @@ -2119,7 +2099,7 @@ private static void continueIfStringOverflow(SQLException ex) throws SQLExceptio } private static boolean isAppendableUpdate(UpdateOp update) { - return NOAPPEND == false; + return !NOAPPEND; } private static long getModifiedFromOperation(Operation op) { @@ -2137,7 +2117,7 @@ private boolean insertDocuments(Collection collection, L } catch (SQLException ex) { this.ch.rollbackConnection(connection); - List ids = new ArrayList(); + List ids = new ArrayList<>(); for (T doc : documents) { ids.add(doc.getId()); } @@ -2160,7 +2140,7 @@ private boolean insertDocuments(Collection collection, L for (Document d : documents) { String data = ser.asString(d, tmd.getColumnOnlyProperties()); - byte bytes[] = asBytes(data); + byte[] bytes = asBytes(data); if (bytes.length > longest) { longest = bytes.length; longestChars = data.length(); @@ -2175,7 +2155,7 @@ private boolean insertDocuments(Collection collection, L } if (!messages.isEmpty()) { - LOG.debug("additional diagnostics: " + messages); + LOG.debug("additional diagnostics: {}", messages); } throw handleException(message, ex, collection, ids); @@ -2212,9 +2192,7 @@ private boolean insertDocuments(Collection collection, L // Whether to use JDBC batch commands for the createOrUpdate (default: true) private static final boolean BATCHUPDATES = SystemPropertySupplier .create(RDBDocumentStore.class.getName() + ".BATCHUPDATES", Boolean.TRUE).loggingTo(LOG) - .formatSetMessage((name, value) -> { - return String.format("Batch updates disabled (system property %s set to '%s')", name, value); - }).get(); + .formatSetMessage((name, value) -> String.format("Batch updates disabled (system property %s set to '%s')", name, value)).get(); public static byte[] asBytes(@NotNull String data) { byte[] bytes = data.getBytes(StandardCharsets.UTF_8); @@ -2280,15 +2258,6 @@ private static NodeDocument wrap(@Nullable NodeDocument doc) { return doc == null ? NodeDocument.NULL : doc; } - @NotNull - private static String idOf(@NotNull Document doc) { - String id = doc.getId(); - if (id == null) { - throw new IllegalArgumentException("non-null ID expected"); - } - return id; - } - private static long modcountOf(@NotNull Document doc) { Long n = doc.getModCount(); return n != null ? n : -1; @@ -2296,7 +2265,7 @@ private static long modcountOf(@NotNull Document doc) { private static long modifiedOf(@NotNull Document doc) { Object l = doc.get(NodeDocument.MODIFIED_IN_SECS); - return (l instanceof Long) ? ((Long)l).longValue() : -1; + return (l instanceof Long) ? 
(Long) l : -1; } @NotNull @@ -2340,13 +2309,13 @@ private static String dumpKeysAndModcounts(Map d } // keeping track of CLUSTER_NODES updates - private Map cnUpdates = new ConcurrentHashMap(); + private final Map cnUpdates = new ConcurrentHashMap<>(); private void maintainUpdateStats(Collection collection, String key) { if (collection == Collection.CLUSTER_NODES) { synchronized (this) { Long old = cnUpdates.get(key); - old = old == null ? Long.valueOf(1) : old + 1; + old = old == null ? 1L : old + 1; cnUpdates.put(key, old); } } @@ -2357,8 +2326,8 @@ private String getCnStats() { return ""; } else { List> tmp = new ArrayList<>(cnUpdates.entrySet()); - Collections.sort(tmp, (Entry o1, Entry o2) -> o1.getKey().compareTo(o2.getKey())); - return " (Cluster Node updates: " + tmp.toString() + ")"; + tmp.sort(Entry.comparingByKey()); + return " (Cluster Node updates: " + tmp + ")"; } } @@ -2385,7 +2354,7 @@ private DocumentStoreException handleException(String messa return handleException(message, ex, collection, Collections.singleton(id)); } - protected class UnsupportedIndexedPropertyException extends DocumentStoreException { + protected static class UnsupportedIndexedPropertyException extends DocumentStoreException { private static final long serialVersionUID = -8392572622365260105L; @@ -2416,7 +2385,7 @@ public void close() { protected static class QueryCondition { private final String propertyName, operator; - private final List operands; + private final List operands; public QueryCondition(String propertyName, String operator, long value) { this.propertyName = propertyName; @@ -2424,7 +2393,7 @@ public QueryCondition(String propertyName, String operator, long value) { this.operands = Collections.singletonList(value); } - public QueryCondition(String propertyName, String operator, List values) { + public QueryCondition(String propertyName, String operator, List values) { this.propertyName = propertyName; this.operator = operator; this.operands = values; @@ -2444,7 +2413,7 @@ public String getOperator() { return operator; } - public List getOperands() { + public List getOperands() { return this.operands; } @@ -2455,7 +2424,7 @@ public String toString() { } else if (this.operands.size() == 1) { return String.format("%s %s %s", propertyName, operator, operands.get(0).toString()); } else { - return String.format("%s %s %s", propertyName, operator, operands.toString()); + return String.format("%s %s %s", propertyName, operator, operands); } } } diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreDB.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreDB.java index f7deadc9684..c659258e8de 100755 --- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreDB.java +++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreDB.java @@ -70,7 +70,7 @@ public String checkVersion(DatabaseMetaData md) throws SQLException { @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; try { @@ -141,13 +141,13 @@ public String getTableCreationStatement(String tableName, int schema) { @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; SortedSet indexNames 
= Collections.emptySortedSet(); // get index names try { - SortedSet in = new TreeSet(); + SortedSet in = new TreeSet<>(); con = ch.getROConnection(); try (PreparedStatement stmt = con.prepareStatement("SELECT indexname FROM pg_indexes WHERE tablename=?")) { stmt.setString(1, tableName.toLowerCase(Locale.ENGLISH)); @@ -222,7 +222,7 @@ public String getTableCreationStatement(String tableName, int schema) { @Override public List getIndexCreationStatements(String tableName, int schema) { - List statements = new ArrayList(); + List statements = new ArrayList<>(); String pkName = tableName + "_pk"; statements.add("create unique index " + pkName + " on " + tableName + " ( ID ) cluster"); statements.add("alter table " + tableName + " add constraint " + pkName + " primary key ( ID )"); @@ -233,7 +233,7 @@ public List getIndexCreationStatements(String tableName, int schema) { @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; @@ -326,7 +326,7 @@ public String getTableCreationStatement(String tableName, int schema) { @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; @@ -342,7 +342,7 @@ public Map getAdditionalStatistics(RDBConnectionHandler ch, Stri try (ResultSet rs = stmt.executeQuery()) { while (rs.next()) { String data = extractFields(rs, tableStats); - result.put("_data", data.toString()); + result.put("_data", data); } } } @@ -437,7 +437,7 @@ public int setParameters(PreparedStatement stmt, int startIndex) throws SQLExcep @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; @@ -454,7 +454,7 @@ public Map getAdditionalStatistics(RDBConnectionHandler ch, Stri try (ResultSet rs = stmt.executeQuery()) { while (rs.next()) { String data = extractFields(rs, tableStats); - result.put("_data", data.toString()); + result.put("_data", data); } } } @@ -553,7 +553,7 @@ private long parseSize(String readable) { @Override public Map getAdditionalStatistics(RDBConnectionHandler ch, String catalog, String tableName) { - Map result = new HashMap(); + Map result = new HashMap<>(); Connection con = null; // table data @@ -625,7 +625,7 @@ public String makeIndexConditionalForColumn(String columnName) { public enum FETCHFIRSTSYNTAX { FETCHFIRST, LIMIT, TOP - }; + } /** * Check the database brand and version @@ -735,7 +735,7 @@ public Map getAdditionalDiagnostics(RDBConnectionHandler ch, Str *
size of individual indexes
 *
- * Additionally, a information obtained from the databases system
+ * Additionally, information obtained from the database's system
 * tables/views can be included:
 *
 * _data
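// Editorial sketch, not part of the upstream patch: one way the statistics
// documented above might be consumed. It assumes only what the diff itself
// shows: RDBDocumentStore#getStats() returns a Map<String, String> built via
// toMapBuilder(), with per-table keys such as "<table>.count" plus the
// DB-specific entries ("_data", individual index sizes) described in this
// javadoc. "store" is a hypothetical, already-initialized RDBDocumentStore,
// and java.util.Map is assumed to be imported.
Map<String, String> stats = store.getStats();
stats.entrySet().stream()
        .sorted(Map.Entry.comparingByKey())
        .forEach(e -> System.out.println(e.getKey() + " = " + e.getValue()));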
@@ -848,12 +848,12 @@ protected String extractFields(ResultSet rs, String indexStats) throws SQLExcept
         return data.toString();
     }
 
-    private RDBDocumentStoreDB(String description) {
+    RDBDocumentStoreDB(String description) {
         this.description = description;
         this.vendorCode = RDBCommonVendorSpecificCode.DEFAULT;
     }
 
-    private RDBDocumentStoreDB(String description, RDBCommonVendorSpecificCode vendorCode) {
+    RDBDocumentStoreDB(String description, RDBCommonVendorSpecificCode vendorCode) {
         this.description = description;
         this.vendorCode = vendorCode;
     }
@@ -873,7 +873,7 @@ public static RDBDocumentStoreDB getValue(String desc) {
             }
         }
 
-        LOG.error("DB type " + desc + " unknown, trying default settings");
+        LOG.error("DB type {} unknown, trying default settings", desc);
         DEFAULT.description = desc + " - using default settings";
         return DEFAULT;
     }
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
index 54c2d5f81c1..760228a5b64 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
@@ -35,6 +35,7 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -81,7 +82,7 @@ public class RDBDocumentStoreJDBC {
     private final RDBDocumentSerializer ser;
     private final int queryHitsLimit, queryTimeLimit;
 
-    private static final Long INITIALMODCOUNT = Long.valueOf(1);
+    private static final Long INITIALMODCOUNT = 1L;
 
     public RDBDocumentStoreJDBC(RDBDocumentStoreDB dbInfo, RDBDocumentSerializer ser, int queryHitsLimit, int queryTimeLimit) {
         this.dbInfo = dbInfo;
@@ -96,19 +97,18 @@ public boolean appendingUpdate(Connection connection, RDBTableMetaData tmd, Stri
         String appendDataWithComma = "," + appendData;
         PreparedStatementComponent stringAppend = this.dbInfo.getConcatQuery(appendDataWithComma, tmd.getDataLimitInOctets());
         StringBuilder t = new StringBuilder();
-        t.append("update " + tmd.getName() + " set ");
+        t.append("update ").append(tmd.getName()).append(" set ");
         t.append(setModifiedConditionally ? "MODIFIED = case when ? > MODIFIED then ? else MODIFIED end, " : "MODIFIED = ?, ");
         t.append("HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = DSIZE + ?, ");
         if (tmd.hasVersion()) {
             t.append("VERSION = " + SCHEMAVERSION + ", ");
         }
-        t.append("DATA = " + stringAppend.getStatementComponent() + " ");
+        t.append("DATA = ").append(stringAppend.getStatementComponent()).append(" ");
         t.append("where ID = ?");
         if (oldmodcount != null) {
             t.append(" and MODCOUNT = ?");
         }
-        PreparedStatement stmt = connection.prepareStatement(t.toString());
-        try {
+        try (PreparedStatement stmt = connection.prepareStatement(t.toString())) {
             int si = 1;
             stmt.setObject(si++, modified, Types.BIGINT);
             if (setModifiedConditionally) {
@@ -127,11 +127,9 @@ public boolean appendingUpdate(Connection connection, RDBTableMetaData tmd, Stri
             }
             int result = stmt.executeUpdate();
             if (result != 1) {
-                LOG.debug("DB append update failed for " + tmd.getName() + "/" + id + " with oldmodcount=" + oldmodcount);
+                LOG.debug("DB append update failed for {}/{} with oldmodcount={}", tmd.getName(), id, oldmodcount);
             }
             return result == 1;
-        } finally {
-            stmt.close();
         }
     }
 
@@ -148,7 +146,7 @@ public int delete(Connection connection, RDBTableMetaData tmd, List allI
                 inClause.setParameters(stmt, 1);
                 int result = stmt.executeUpdate();
                 if (result != ids.size()) {
-                    LOG.debug("DB delete failed for " + tmd.getName() + "/" + ids);
+                    LOG.debug("DB delete failed for {}/{}", tmd.getName(), ids);
                 }
                 count += result;
             } finally {
@@ -161,8 +159,7 @@ public int delete(Connection connection, RDBTableMetaData tmd, Map toDelete)
             throws SQLException {
-        PreparedStatement stmt = connection.prepareStatement("delete from " + tmd.getName() + " where ID=? and MODIFIED=?");
-        try {
+        try (PreparedStatement stmt = connection.prepareStatement("delete from " + tmd.getName() + " where ID=? and MODIFIED=?")) {
             for (Entry entry : toDelete.entrySet()) {
                 setIdInStatement(tmd, stmt, 1, entry.getKey());
                 stmt.setLong(2, entry.getValue());
@@ -176,8 +173,6 @@ public int delete(Connection connection, RDBTableMetaData tmd, Map
                 }
             }
             return updatedRows;
-        } finally {
-            stmt.close();
         }
     }
 
@@ -187,22 +182,19 @@ public int deleteWithCondition(Connection connection, RDBTableMetaData tmd, List
         StringBuilder query = new StringBuilder("delete from " + tmd.getName());
 
         String whereClause = buildWhereClause(null, null, null, conditions);
-        if (whereClause.length() != 0) {
+        if (!whereClause.isEmpty()) {
             query.append(" where ").append(whereClause);
         }
 
-        PreparedStatement stmt = connection.prepareStatement(query.toString());
-        try {
+        try (PreparedStatement stmt = connection.prepareStatement(query.toString())) {
             int si = 1;
             for (QueryCondition cond : conditions) {
                 if (cond.getOperands().size() != 1) {
                     throw new DocumentStoreException("unexpected condition: " + cond);
                 }
-                stmt.setLong(si++, (Long)cond.getOperands().get(0));
+                stmt.setLong(si++, (Long) cond.getOperands().get(0));
             }
             return stmt.executeUpdate();
-        } finally {
-            stmt.close();
         }
     }
 
@@ -210,7 +202,7 @@ public long determineServerTimeDifferenceMillis(Connection connection) {
         String sql = this.dbInfo.getCurrentTimeStampInSecondsSyntax();
 
         if (sql.isEmpty()) {
-            LOG.debug("{}: unsupported database, skipping DB server time check", this.dbInfo.toString());
+            LOG.debug("{}: unsupported database, skipping DB server time check", this.dbInfo);
             return 0;
         } else {
             PreparedStatement stmt = null;
@@ -295,7 +287,7 @@ public Set insert(Connection connection, RDBTableMe
         } finally {
             stmt.close();
         }
-        Set succesfullyInserted = new HashSet();
+        Set succesfullyInserted = new HashSet<>();
         for (int i = 0; i < results.length; i++) {
             int result = results[i];
             if (result != 1 && result != Statement.SUCCESS_NO_INFO) {
@@ -316,7 +308,7 @@ public Set insert(Connection connection, RDBTableMe
      * If the {@code upsert} parameter is set to true, the method will also try to insert new documents, those
      * which modcount equals to 1.
      *
-     * The order of applying updates will be different than order of the passed list, so there shouldn't be two
+     * The order of applying updates will be different from the order of the passed list, so there shouldn't be two
      * updates related to the same document. An {@link IllegalArgumentException} will be thrown if there are.
      *
      * @param connection JDBC connection
@@ -330,15 +322,14 @@ public Set update(Connection connection, RDBTableMe
             throws SQLException {
         assertNoDuplicatedIds(documents);
 
-        Set successfulUpdates = new HashSet();
-        List updatedKeys = new ArrayList();
+        Set successfulUpdates = new HashSet<>();
+        List updatedKeys = new ArrayList<>();
         List modCounts = LOG.isTraceEnabled() ? new ArrayList<>() : null;
 
         int[] batchResults = new int[0];
-        PreparedStatement stmt = connection.prepareStatement("update " + tmd.getName()
+        try (PreparedStatement stmt = connection.prepareStatement("update " + tmd.getName()
                 + " set MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, "
-                + (tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "") + "BDATA = ? where ID = ? and MODCOUNT = ?");
-        try {
+                + (tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "") + "BDATA = ? where ID = ? and MODCOUNT = ?")) {
             boolean batchIsEmpty = true;
             for (T document : sortDocuments(documents)) {
                 Long modcount = (Long) document.get(MODCOUNT);
@@ -385,8 +376,6 @@ public Set update(Connection connection, RDBTableMe
         } catch (BatchUpdateException ex) {
             LOG.debug("Some of the batch updates failed", ex);
             batchResults = ex.getUpdateCounts();
-        } finally {
-            stmt.close();
         }
 
         if (!updatedKeys.isEmpty() && LOG.isTraceEnabled()) {
@@ -411,7 +400,7 @@ public Set update(Connection connection, RDBTableMe
         }
 
         if (upsert) {
-            List toBeInserted = new ArrayList(documents.size());
+            List toBeInserted = new ArrayList<>(documents.size());
             for (T doc : documents) {
                 if (INITIALMODCOUNT.equals(doc.get(MODCOUNT))) {
                     toBeInserted.add(doc);
@@ -419,9 +408,7 @@ public Set update(Connection connection, RDBTableMe
             }
 
             if (!toBeInserted.isEmpty()) {
-                for (String id : insert(connection, tmd, toBeInserted)) {
-                    successfulUpdates.add(id);
-                }
+                successfulUpdates.addAll(insert(connection, tmd, toBeInserted));
             }
         }
         return successfulUpdates;
@@ -437,7 +424,7 @@ private static void assertNoDuplicatedIds(List documents
 
     public List query(Connection connection, RDBTableMetaData tmd, String minId, String maxId,
             List excludeKeyPatterns, List conditions, int limit) throws SQLException {
         long start = System.currentTimeMillis();
-        List result = new ArrayList();
+        List result = new ArrayList<>();
         long dataTotal = 0, bdataTotal = 0;
         PreparedStatement stmt = null;
         String fields;
@@ -496,7 +483,7 @@ public List query(Connection connection, RDBTableMetaData tmd, String mi
                 maxId, excludeKeyPatterns, conditions, limit);
 
         String resultRange = "";
-        if (result.size() > 0) {
+        if (!result.isEmpty()) {
             resultRange = String.format(" Result range: '%s'...'%s'.", result.get(0).getId(),
                     result.get(result.size() - 1).getId());
         }
@@ -542,8 +529,7 @@ public long getLong(Connection connection, RDBTableMetaData tmd, String aggregat
             long elapsed = System.currentTimeMillis() - start;
             String params = String.format("params minid '%s' maxid '%s' excludeKeyPatterns %s conditions %s.", minId,
                     maxId, excludeKeyPatterns, conditions);
-            LOG.debug("Aggregate query " + selector + " on " + tmd.getName() + " with " + params + " -> " + result + ", took "
-                    + elapsed + "ms");
+            LOG.debug("Aggregate query {} on {} with {} -> {}, took {}ms", selector, tmd.getName(), params, result, elapsed);
             }
         }
     }
@@ -556,9 +542,9 @@ public Iterator queryAsIterator(RDBConnectionHandler ch, RDBTableMetaDat
 
     private class ResultSetIterator implements Iterator, Closeable {
 
-        private RDBConnectionHandler ch;
+        private final RDBConnectionHandler ch;
         private Connection connection;
-        private RDBTableMetaData tmd;
+        private final RDBTableMetaData tmd;
         private PreparedStatement stmt;
         private ResultSet rs;
         private RDBRow next = null;
@@ -566,7 +552,7 @@ private class ResultSetIterator implements Iterator, Closeable {
         private long elapsed = 0;
         private String message = null;
         private long cnt = 0;
-        private long pstart;
+        private final long pstart;
 
         public ResultSetIterator(RDBConnectionHandler ch, RDBTableMetaData tmd, String minId, String maxId,
                 List excludeKeyPatterns, List conditions, int limit, String sortBy) throws SQLException {
@@ -659,19 +645,18 @@ private RDBRow internalNext() {
         }
 
         @Override
-        public void close() throws IOException {
+        public void close() {
             internalClose();
         }
 
-        @SuppressWarnings("deprecation")
         @Override
         public void finalize() throws Throwable {
             try {
                 if (this.connection != null) {
                     if (this.callstack != null) {
-                        LOG.error("finalizing unclosed " + this + "; check caller", this.callstack);
+                        LOG.error("finalizing unclosed {}; check caller", this, this.callstack);
                     } else {
-                        LOG.error("finalizing unclosed " + this);
+                        LOG.error("finalizing unclosed {}", this);
                     }
                 }
             } finally {
@@ -685,7 +670,7 @@ private void internalClose() {
             this.ch.closeConnection(this.connection);
             this.connection = null;
             if (LOG.isDebugEnabled()) {
-                LOG.debug(this.message + " -> " + this.cnt + " results in " + elapsed + "ms");
+                LOG.debug("{} -> {} results in {}ms", this.message, this.cnt, elapsed);
             }
         }
     }
@@ -697,17 +682,17 @@ private PreparedStatement prepareQuery(Connection connection, RDBTableMetaData t
         StringBuilder selectClause = new StringBuilder();
 
         if (limit != Integer.MAX_VALUE && this.dbInfo.getFetchFirstSyntax() == FETCHFIRSTSYNTAX.TOP) {
-            selectClause.append("TOP " + limit + " ");
+            selectClause.append("TOP ").append(limit).append(" ");
         }
 
-        selectClause.append(columns + " from " + tmd.getName());
+        selectClause.append(columns).append(" from ").append(tmd.getName());
 
         String whereClause = buildWhereClause(minId, maxId, excludeKeyPatterns, conditions);
 
         StringBuilder query = new StringBuilder();
         query.append("select ").append(selectClause);
 
-        if (whereClause.length() != 0) {
+        if (!whereClause.isEmpty()) {
             query.append(" where ").append(whereClause);
         }
 
@@ -718,10 +703,10 @@ private PreparedStatement prepareQuery(Connection connection, RDBTableMetaData t
         if (limit != Integer.MAX_VALUE) {
             switch (this.dbInfo.getFetchFirstSyntax()) {
                 case LIMIT:
-                    query.append(" LIMIT " + limit);
+                    query.append(" LIMIT ").append(limit);
                     break;
                 case FETCHFIRST:
-                    query.append(" FETCH FIRST " + limit + " ROWS ONLY");
+                    query.append(" FETCH FIRST ").append(limit).append(" ROWS ONLY");
                     break;
                 default:
                     break;
@@ -753,7 +738,7 @@ private PreparedStatement prepareQuery(Connection connection, RDBTableMetaData t
 
     public List read(Connection connection, RDBTableMetaData tmd, Collection allKeys) throws SQLException {
-        List rows = new ArrayList();
+        List rows = new ArrayList<>();
 
         for (List keys : IterableUtils.partition(allKeys, RDBJDBCTools.MAX_IN_CLAUSE)) {
             long pstart = PERFLOG.start(PERFLOG.isDebugEnabled() ? ("reading: " + keys) : null);
@@ -797,7 +782,7 @@ public List read(Connection connection, RDBTableMetaData tmd, Collection
                             modcount, modified, (data == null ? 0 : data.length()), (bdata == null ? 0 : bdata.length));
                 }
             } catch (SQLException ex) {
-                LOG.debug("attempting to read " + keys, ex);
+                LOG.debug("attempting to read {}", keys, ex);
                 PERFLOG.end(pstart, 10, "read: table={} -> exception={}", tmd.getName(), ex.getMessage());
 
                 // DB2 throws an SQLException for invalid keys; handle this more
@@ -847,7 +832,7 @@ public RDBRow read(Connection connection, RDBTableMetaData tmd, String id, long
             // or the database does not support CASE in SELECT
             sql.append("DATA, BDATA ");
         }
-        sql.append("from " + tmd.getName() + " where ID = ?");
+        sql.append("from ").append(tmd.getName()).append(" where ID = ?");
         PreparedStatement stmt = connection.prepareStatement(sql.toString());
         ResultSet rs = null;
@@ -886,7 +871,7 @@ public RDBRow read(Connection connection, RDBTableMetaData tmd, String id, long
                 return null;
             }
         } catch (SQLException ex) {
-            LOG.debug("attempting to read " + id + " (id length is " + id.length() + ")", ex);
+            LOG.debug("attempting to read {} (id length is {})", id, id.length(), ex);
 
             PERFLOG.end(pstart, 10, "read: table={}, id={}, lastmodcount={}, lastmodified={} -> exception={}",
                     tmd.getName(), id, lastmodcount, lastmodified, ex.getMessage());
@@ -912,15 +897,13 @@ public boolean update(Connection connection, RDBTableMetaData tmd, String id, Lo
             Boolean deletedOnce, Long modcount, Long cmodcount, Long oldmodcount, String data) throws SQLException {
         StringBuilder t = new StringBuilder();
-        t.append("update " + tmd.getName() + " set ");
-        t.append("MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, "
-                + (tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "") + "BDATA = ? ");
+        t.append("update ").append(tmd.getName()).append(" set ");
+        t.append("MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, ").append(tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "").append("BDATA = ? ");
         t.append("where ID = ?");
         if (oldmodcount != null) {
             t.append(" and MODCOUNT = ?");
         }
-        PreparedStatement stmt = connection.prepareStatement(t.toString());
-        try {
+        try (PreparedStatement stmt = connection.prepareStatement(t.toString())) {
             int si = 1;
             stmt.setObject(si++, modified, Types.BIGINT);
             stmt.setObject(si++, hasBinaryAsNullOrInteger(hasBinary), Types.SMALLINT);
@@ -945,17 +928,15 @@ public boolean update(Connection connection, RDBTableMetaData tmd, String id, Lo
             }
             int result = stmt.executeUpdate();
             if (result != 1) {
-                LOG.debug("DB update failed for " + tmd.getName() + "/" + id + " with oldmodcount=" + oldmodcount);
+                LOG.debug("DB update failed for {}/{} with oldmodcount={}", tmd.getName(), id, oldmodcount);
             }
             return result == 1;
-        } finally {
-            stmt.close();
         }
     }
 
     private final static Map INDEXED_PROP_MAPPING;
     static {
-        Map tmp = new HashMap();
+        Map tmp = new HashMap<>();
         tmp.put(MODIFIED, "MODIFIED");
         tmp.put(NodeDocument.HAS_BINARY_FLAG, "HASBINARY");
         tmp.put(NodeDocument.DELETED_ONCE, "DELETEDONCE");
@@ -968,17 +949,7 @@ public boolean update(Connection connection, RDBTableMetaData tmd, String id, Lo
 
     private final static Set SUPPORTED_OPS;
     static {
-        Set tmp = new HashSet();
-        tmp.add(">=");
-        tmp.add(">");
-        tmp.add("<=");
-        tmp.add("<");
-        tmp.add("=");
-        tmp.add("in");
-        tmp.add("is null");
-        tmp.add("is not null");
-        tmp.add("null or <");
-        SUPPORTED_OPS = Collections.unmodifiableSet(tmp);
+        SUPPORTED_OPS = Set.of(">=", ">", "<=", "<", "=", "in", "is null", "is not null", "null or <");
     }
 
     // some DBs do not accept null character as string
@@ -1029,7 +1000,7 @@ private static String buildWhereClause(String minId, String maxId, List
             }
             result.append(column).append(" ").append(realOperand);
-            List operands = cond.getOperands();
+            List operands = cond.getOperands();
             if (operands.size() == 1) {
                 result.append(" ?");
             } else if (operands.size() > 1) {
@@ -1072,7 +1043,7 @@ private static void setIdInStatement(RDBTableMetaData tmd, PreparedStatement stm
                 stmt.setString(idx, id);
             }
         } catch (IOException ex) {
-            LOG.warn("Invalid ID: " + id, ex);
+            LOG.warn("Invalid ID: {}", id, ex);
             throw asDocumentStoreException(ex, "Invalid ID: " + id);
         }
     }
@@ -1093,13 +1064,13 @@ private static long readLongFromResultSet(ResultSet res, int index) throws SQLEx
     @Nullable
     private static Boolean readBooleanOrNullFromResultSet(ResultSet res, int index) throws SQLException {
         long v = res.getLong(index);
-        return res.wasNull() ? null : Boolean.valueOf(v != 0);
+        return res.wasNull() ? null : v != 0;
     }
 
     @Nullable
     private static Long readLongOrNullFromResultSet(ResultSet res, int index) throws SQLException {
         long v = res.getLong(index);
-        return res.wasNull() ? null : Long.valueOf(v);
+        return res.wasNull() ? null : v;
    }
 
     private static final Integer INT_FALSE = 0;
@@ -1107,7 +1078,7 @@ private static Long readLongOrNullFromResultSet(ResultSet res, int index) throws
 
     @Nullable
     private static Integer deletedOnceAsNullOrInteger(Boolean b) {
-        return b == null ? null : (b.booleanValue() ? INT_TRUE : INT_FALSE);
+        return b == null ? null : (b ? INT_TRUE : INT_FALSE);
     }
 
     @Nullable
@@ -1116,8 +1087,8 @@ private static Integer hasBinaryAsNullOrInteger(Number n) {
     }
 
     private static List sortDocuments(Collection documents) {
-        List result = new ArrayList(documents);
-        Collections.sort(result, (o1, o2) -> Objects.toString(o1.getId(), "").compareTo(Objects.toString(o2.getId(), "")));
+        List result = new ArrayList<>(documents);
+        result.sort(Comparator.comparing(o -> Objects.toString(o.getId(), "")));
         return result;
     }
 }
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBExport.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBExport.java
index 12f252f3856..728caedfa33 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBExport.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBExport.java
@@ -62,11 +62,11 @@ public class RDBExport {
 
     private enum Format {
         JSON, JSONARRAY, CSV
-    };
+    }
 
     private static final RDBJSONSupport JSON = new RDBJSONSupport(false);
 
-    private static final Set EXCLUDE_COLUMNS = new HashSet();
+    private static final Set EXCLUDE_COLUMNS = new HashSet<>();
     static {
         EXCLUDE_COLUMNS.add(Document.ID);
     }
@@ -135,7 +135,7 @@ public static void main(String[] args) throws ClassNotFoundException, SQLExcepti
         // JSON output with fieldList missing "_id"
         if ((format == Format.JSON || format == Format.JSONARRAY) && !fieldList.isEmpty() && !fieldList.contains("_id")) {
-            fieldList = new ArrayList(fieldList);
+            fieldList = new ArrayList<>(fieldList);
             fieldList.add(0, "_id");
         }
 
@@ -200,7 +200,7 @@ private static void dumpFile(String filename, String lobdir, Format format, Prin
             String sbdata = fields.get(iBData);
             byte[] bytes = null;
-            if (sbdata.length() != 0) {
+            if (!sbdata.isEmpty()) {
                 String lobfile = sbdata.replace("/", "");
 
                 if (!lobfile.endsWith(".lob")) {
@@ -212,8 +212,8 @@ private static void dumpFile(String filename, String lobdir, Format format, Prin
                     lobfile = lobfile.substring(0, lastdot);
                     System.err.println("lastdot: " + lastdot + "; length: " + length + "; lobfile: " + lobfile + "; lastdot: " + lastdot + "; startpos: " + startpos);
-                    int s = Integer.valueOf(startpos);
-                    int l = Integer.valueOf(length);
+                    int s = Integer.parseInt(startpos);
+                    int l = Integer.parseInt(length);
                     File lf = new File(lobDirectory, lobfile);
                     InputStream is = new FileInputStream(lf);
                     bytes = new byte[l];
@@ -227,7 +227,7 @@ private static void dumpFile(String filename, String lobdir, Format format, Prin
             }
             try {
                 RDBRow row = new RDBRow(id, "1".equals(shasbinary) ? 1L : 0L, "1".equals(sdeletedonce),
-                        smodified.length() == 0 ? 0 : Long.parseLong(smodified), Long.parseLong(smodcount),
+                        smodified.isEmpty() ? 0 : Long.parseLong(smodified), Long.parseLong(smodcount),
                         Long.parseLong(scmodcount), -1L, -1L, -1L, sdata, bytes);
                 StringBuilder fulljson = dumpRow(ser, id, row);
                 if (format == Format.CSV) {
@@ -255,7 +255,7 @@ private static void dumpFile(String filename, String lobdir, Format format, Prin
     }
 
     protected static List parseDel(String line) {
-        ArrayList result = new ArrayList();
+        ArrayList result = new ArrayList<>();
 
         boolean inQuoted = false;
         char quotechar = '"';
@@ -358,13 +358,13 @@ private static void dumpJDBC(String url, String user, String pw, String table, S
     @Nullable
     private static Boolean readBooleanOrNullFromResultSet(ResultSet res, String field) throws SQLException {
         long v = res.getLong(field);
-        return res.wasNull() ? null : Boolean.valueOf(v != 0);
+        return res.wasNull() ? null : v != 0;
     }
 
     @Nullable
     private static Long readLongOrNullFromResultSet(ResultSet res, String field) throws SQLException {
         long v = res.getLong(field);
-        return res.wasNull() ? null : Long.valueOf(v);
+        return res.wasNull() ? null : v;
     }
 
     @NotNull
@@ -448,7 +448,7 @@ private static void dumpJsonValuetoCsv(StringBuilder buf, Object o) {
         if (o == null) {
             buf.append("null");
         } else if (o instanceof Boolean) {
-            buf.append(o.toString());
+            buf.append(o);
         } else if (o instanceof Long) {
             buf.append(((Long) o).longValue());
         } else {
@@ -473,24 +473,24 @@ private static void printUsage() {
 
     private static void printHelp() {
         System.err.println("Export Apache OAK RDB data to JSON files");
-        System.err.println("");
+        System.err.println();
         System.err.println("Generic options:");
         System.err.println(" --help produce this help message");
         System.err.println(" --version show version information");
-        System.err.println("");
+        System.err.println();
         System.err.println("JDBC options:");
         System.err.println(" -j/--jdbc-url JDBC-URL JDBC URL of database to connect to");
         System.err.println(" -u/--username username database username");
        System.err.println(" -p/--password password database password");
         System.err.println(" -c/--collection table table name (defaults to 'nodes')");
         System.err.println(" -q/--query query SQL where clause (minus 'where')");
-        System.err.println("");
+        System.err.println();
         System.err.println("Dump file options:");
         System.err.println(" --columns column-names column names (comma separated)");
         System.err.println(" --from-db2-dump file name of DB2 DEL export file");
         System.err.println(" --lobdir dir name of DB2 DEL export file LOB directory");
         System.err.println(" (defaults to ./lobdir under the dump file)");
-        System.err.println("");
+        System.err.println();
         System.err.println("Output options:");
         System.err.println(" -o/--out file Output to name file (instead of stdout)");
         System.err.println(" --jsonArray Output a JSON array (instead of one");
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBHelper.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBHelper.java
index 4f761396c9e..ea0f9bb30f5 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBHelper.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBHelper.java
@@ -30,7 +30,7 @@
  */
 public class RDBHelper {
 
-    private static String[] DATABASES = { "Apache Derby", "DB2", "H2", "Microsoft SQL Server", "MySQL", "Oracle", "PostgreSQL",
+    private static final String[] DATABASES = { "Apache Derby", "DB2", "H2", "Microsoft SQL Server", "MySQL", "Oracle", "PostgreSQL",
             "default" };
 
     public static void main(String[] args) {
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJDBCTools.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJDBCTools.java
index 45bdde190c7..c9162c1bbf3 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJDBCTools.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJDBCTools.java
@@ -105,7 +105,7 @@ protected static String driverForDBType(String type) {
             throws IllegalArgumentException {
         String p = checkLegalTableName(prefix);
         String b = checkLegalTableName(basename);
-        if (p.length() != 0 && !p.endsWith("_")) {
+        if (!p.isEmpty() && !p.endsWith("_")) {
             p += "_";
         }
         return p + b;
@@ -153,7 +153,7 @@ protected static String dumpResultSetMeta(ResultSetMetaData met) {
         try {
             StringBuilder sb = new StringBuilder();
             sb.append(String.format("%s.%s: ", met.getSchemaName(1).trim(), met.getTableName(1).trim()));
-            Map types = new TreeMap();
+            Map types = new TreeMap<>();
             for (int i = 1; i <= met.getColumnCount(); i++) {
                 if (i > 1) {
                     sb.append(", ");
@@ -162,7 +162,7 @@ protected static String dumpResultSetMeta(ResultSetMetaData met) {
                         dumpColumnMeta(met.getColumnName(i), met.getColumnType(i), met.getColumnTypeName(i), met.getPrecision(i)));
                 types.put(met.getColumnTypeName(i), met.getColumnType(i));
             }
-            sb.append(" /* " + types.toString() + " */");
+            sb.append(" /* ").append(types).append(" */");
             return sb.toString();
         } catch (SQLException ex) {
             return "Column metadata unavailable: " + ex.getMessage();
@@ -173,7 +173,7 @@ protected static String dumpResultSetMeta(ResultSetMetaData met) {
      * Return a string containing additional messages from chained exceptions.
      */
     protected static @NotNull String getAdditionalMessages(SQLException ex) {
-        List messages = new ArrayList();
+        List messages = new ArrayList<>();
         String message = ex.getMessage();
         SQLException next = ex.getNextException();
         while (next != null) {
@@ -229,8 +229,7 @@ protected static String versionCheck(DatabaseMetaData md, int dbmax, int dbmin,
         int min = md.getDatabaseMinorVersion();
 
         if (maj < dbmax || (maj == dbmax && min < dbmin)) {
-            result.append(
-                    "Unsupported " + dbname + " version: " + maj + "." + min + ", expected at least " + dbmax + "." + dbmin);
+            result.append("Unsupported ").append(dbname).append(" version: ").append(maj).append(".").append(min).append(", expected at least ").append(dbmax).append(".").append(dbmin);
         }
     }
@@ -242,8 +241,7 @@ protected static String versionCheck(DatabaseMetaData md, int dbmax, int dbmin,
             if (result.length() != 0) {
                 result.append(", ");
             }
-            result.append("Unsupported " + dbname + " driver version: " + md.getDriverName() + " " + maj + "." + min
-                    + ", expected at least " + drmax + "." + drmin);
+            result.append("Unsupported ").append(dbname).append(" driver version: ").append(md.getDriverName()).append(" ").append(maj).append(".").append(min).append(", expected at least ").append(drmax).append(".").append(drmin);
         }
     }
@@ -309,8 +307,7 @@ public interface PreparedStatementComponent {
      * @return a string suitable for inclusion into a
      *         {@link PreparedStatement}
      */
-    @NotNull
-    public String getStatementComponent();
+    @NotNull String getStatementComponent();
 
     /**
      * Set the parameters need by the statement component returned by
@@ -323,14 +320,14 @@ public interface PreparedStatementComponent {
      * @return index of next parameter to set
      * @throws SQLException
      */
-    public int setParameters(PreparedStatement stmt, int startIndex) throws SQLException;
+    int setParameters(PreparedStatement stmt, int startIndex) throws SQLException;
     }
 
     /**
      * Appends following SQL condition to the builder: {@code ID in (?,?,?)}.
-     * The field name {@code ID} and the number of place holders is
-     * configurable. If the number of place holders is greater than
-     * {@code maxListLength}, then the condition will have following form:
+     * The field name {@code ID} and the number of placeholders are
+     * configurable. If the number of placeholders is greater than
+     * {@code maxListLength}, then the condition will have the following form:
      * {@code (ID in (?,?,?) or ID in (?,?,?) or ID in (?,?))}
      *
      * @param builder
@@ -417,7 +414,7 @@ public int setParameters(PreparedStatement stmt, int startIndex) throws SQLExcep
                 stmt.setString(startIndex++, value);
             }
         } catch (IOException ex) {
-            LOG.warn("Invalid ID: " + value, ex);
+            LOG.warn("Invalid ID: {}", value, ex);
             throw asDocumentStoreException(ex, "Invalid ID: " + value);
         }
     }
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJSONSupport.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJSONSupport.java
index 4ecaaba7cac..92de4c9daca 100755
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJSONSupport.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBJSONSupport.java
@@ -20,6 +20,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.TreeMap;
 
 import org.apache.jackrabbit.oak.commons.LongUtils;
@@ -45,8 +46,8 @@
  *
  • a {@link Map}, mapping member names to representations.
  *
- * The boolean parameter of the constructor ({link
- * {@link #RDBJSONSupport(boolean)}) allows changing the default for the maps to
+ * The boolean parameter of the constructor
+ * ({@link #RDBJSONSupport(boolean)}) allows changing the default for the maps to
  * use sorted maps using {@link Revision}s as keys, as used internally be the
  * {@link DocumentNodeStore}.
  */
@@ -86,20 +87,13 @@ public Object parse(@NotNull JsopTokenizer json) {
             case JsopReader.NUMBER:
                 String t = json.getToken();
                 Long parsed = LongUtils.tryParse(t);
-                if (parsed != null) {
-                    return parsed;
-                } else {
-                    return Double.parseDouble(t);
-                }
+                return Objects.requireNonNullElseGet(parsed, () -> Double.parseDouble(t));
             case JsopReader.STRING:
                 return json.getToken();
             case '{':
                 if (useRevisionMaps) {
-                    Map map = new TreeMap(StableRevisionComparator.REVERSE);
-                    while (true) {
-                        if (json.matches('}')) {
-                            break;
-                        }
+                    Map map = new TreeMap<>(StableRevisionComparator.REVERSE);
+                    while (!json.matches('}')) {
                         String k = json.readString();
                         if (k == null) {
                             throw new IllegalArgumentException("unexpected null revision");
                         }
@@ -110,11 +104,8 @@ public Object parse(@NotNull JsopTokenizer json) {
                     }
                     return map;
                 } else {
-                    Map map = new HashMap();
-                    while (true) {
-                        if (json.matches('}')) {
-                            break;
-                        }
+                    Map map = new HashMap<>();
+                    while (!json.matches('}')) {
                         String k = json.readString();
                         if (k == null) {
                             throw new IllegalArgumentException("unexpected null key");
                         }
@@ -126,11 +117,8 @@ public Object parse(@NotNull JsopTokenizer json) {
                     return map;
                 }
             case '[':
-                List list = new ArrayList();
-                while (true) {
-                    if (json.matches(']')) {
-                        break;
-                    }
+                List list = new ArrayList<>();
+                while (!json.matches(']')) {
                     list.add(parse(json));
                     json.matches(',');
                 }
                 return list;
@@ -169,9 +157,9 @@ public static void appendJsonValue(StringBuilder sb, Object value) {
         if (value == null) {
             sb.append("null");
         } else if (value instanceof Number) {
-            sb.append(value.toString());
+            sb.append(value);
         } else if (value instanceof Boolean) {
-            sb.append(value.toString());
+            sb.append(value);
         } else if (value instanceof String) {
             appendJsonString(sb, (String) value);
         } else if (value instanceof Map) {
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBMissingLastRevSeeker.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBMissingLastRevSeeker.java
index c9d9829d27e..275c3ae95de 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBMissingLastRevSeeker.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBMissingLastRevSeeker.java
@@ -44,10 +44,8 @@ public class RDBMissingLastRevSeeker extends MissingLastRevSeeker {
     private static final int DEFAULTMODE = 2;
 
     private static final int MODE = SystemPropertySupplier.create(RDBMissingLastRevSeeker.class.getName() + ".MODE", DEFAULTMODE)
-            .loggingTo(LOG).validateWith(value -> (value == 1 || value == 2)).formatSetMessage((name, value) -> {
-                return String.format("Strategy for %s set to %s (via system property %s)", RDBMissingLastRevSeeker.class.getName(),
-                        name, value);
-            }).get();
+            .loggingTo(LOG).validateWith(value -> (value == 1 || value == 2)).formatSetMessage((name, value) -> String.format("Strategy for %s set to %s (via system property %s)", RDBMissingLastRevSeeker.class.getName(),
+                    name, value)).get();
 
     private final RDBDocumentStore store;
 
@@ -59,7 +57,7 @@ public RDBMissingLastRevSeeker(RDBDocumentStore store, Clock clock) {
     @Override
     @NotNull
     public Iterable getCandidates(final long startTime) {
-        LOG.debug("Running getCandidates() in mode " + MODE);
+        LOG.debug("Running getCandidates() in mode {}", MODE);
         if (MODE == 1) {
             return super.getCandidates(startTime);
         } else {
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBRow.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBRow.java
index 9d638a04d4d..60c569c8960 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBRow.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBRow.java
@@ -20,7 +20,7 @@
 import org.jetbrains.annotations.Nullable;
 
 /**
- * Container for the information in a RDB database column.
+ * Container for the information in an RDB database row.
  *
  * Note that the String "data" and the byte[] "bdata" may be {@code null} when
  * the SQL SELECT request was conditional on "modcount" being unchanged.
@@ -43,12 +43,12 @@ public RDBRow(String id, Long hasBinaryProperties, Boolean deletedOnce, Long mod
         this.id = id;
         this.hasBinaryProperties = hasBinaryProperties;
         this.deletedOnce = deletedOnce;
-        this.modified = modified != null ? modified.longValue() : LONG_UNSET;
-        this.modcount = modcount != null ? modcount.longValue() : LONG_UNSET;
-        this.cmodcount = cmodcount != null ? cmodcount.longValue() : LONG_UNSET;
-        this.schemaVersion = schemaVersion != null ? schemaVersion.longValue() : LONG_UNSET;
-        this.sdType = sdType != null ? sdType.longValue() : LONG_UNSET;
-        this.sdMaxRevTime = sdMaxRevTime != null ? sdMaxRevTime.longValue() : LONG_UNSET;
+        this.modified = modified != null ? modified : LONG_UNSET;
+        this.modcount = modcount != null ? modcount : LONG_UNSET;
+        this.cmodcount = cmodcount != null ? cmodcount : LONG_UNSET;
+        this.schemaVersion = schemaVersion != null ? schemaVersion : LONG_UNSET;
+        this.sdType = sdType != null ? sdType : LONG_UNSET;
+        this.sdMaxRevTime = sdMaxRevTime != null ? sdMaxRevTime : LONG_UNSET;
         this.data = data;
         this.bdata = bdata;
     }
@@ -115,7 +115,6 @@ public long getSdMaxRevTime() {
         return sdMaxRevTime;
     }
 
-    @Nullable
     public byte[] getBdata() {
         return bdata;
     }
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBVersionGCSupport.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBVersionGCSupport.java
index 919a212113a..863d2e1eb11 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBVersionGCSupport.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBVersionGCSupport.java
@@ -16,8 +16,6 @@
  */
 package org.apache.jackrabbit.oak.plugins.document.rdb;
 
-import java.io.Closeable;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -53,7 +51,7 @@ public class RDBVersionGCSupport extends VersionGCSupport {
 
     private static final Logger LOG = LoggerFactory.getLogger(RDBVersionGCSupport.class);
 
-    private RDBDocumentStore store;
+    private final RDBDocumentStore store;
 
     // 1: seek using historical, paging mode
     // 2: use custom single query directly using RDBDocumentStore API
@@ -71,7 +69,7 @@ public RDBVersionGCSupport(RDBDocumentStore store) {
 
     @Override
     public Iterable getPossiblyDeletedDocs(final long fromModified, final long toModified) {
-        List conditions = new ArrayList();
+        List conditions = new ArrayList<>();
         conditions.add(new QueryCondition(NodeDocument.DELETED_ONCE, "=", 1));
         conditions.add(new QueryCondition(NodeDocument.MODIFIED_IN_SECS, "<", NodeDocument.getModifiedInSecs(toModified)));
         conditions.add(new QueryCondition(NodeDocument.MODIFIED_IN_SECS, ">=", NodeDocument.getModifiedInSecs(fromModified)));
@@ -102,7 +100,7 @@ private Iterable getSplitDocuments() {
 
     private Iterable identifyGarbageMode1(final Set gcTypes, final RevisionVector sweepRevs,
             final long oldestRevTimeStamp) {
-        return IterableUtils.filter(getSplitDocuments(), getGarbageCheckPredicate(gcTypes, sweepRevs, oldestRevTimeStamp)::test);
+        return IterableUtils.filter(getSplitDocuments(), getGarbageCheckPredicate(gcTypes, sweepRevs, oldestRevTimeStamp));
     }
 
     private Predicate getGarbageCheckPredicate(final Set gcTypes, final RevisionVector sweepRevs,
@@ -126,7 +124,7 @@ private Iterable identifyGarbageMode2(final Set gcTy
             gcTypeCodes.add(type.typeCode());
         }
 
-        List conditions1 = new ArrayList();
+        List conditions1 = new ArrayList<>();
         conditions1.add(new QueryCondition(NodeDocument.SD_TYPE, "in", gcTypeCodes));
         conditions1.add(new QueryCondition(NodeDocument.SD_MAX_REV_TIME_IN_SECS, "<=", NodeDocument.getModifiedInSecs(oldestRevTimeStamp)));
         conditions1.add(new QueryCondition(RDBDocumentStore.VERSIONPROP, ">=", 2));
@@ -134,7 +132,7 @@ private Iterable identifyGarbageMode2(final Set gcTy
         it1 = store.queryAsIterable(Collection.NODES, null, null, Collections.emptyList(), conditions1,
                 Integer.MAX_VALUE, null);
 
-        List conditions2 = new ArrayList();
+        List conditions2 = new ArrayList<>();
         conditions2.add(new QueryCondition(RDBDocumentStore.VERSIONPROP, "null or <", 2));
         it2 = store.queryAsIterable(Collection.NODES, null, null, excludeKeyPatterns, conditions2,
                 Integer.MAX_VALUE, null);
@@ -155,31 +153,28 @@ private Iterable identifyGarbageMode2(final Set gcTy
 
         Predicate pred = getGarbageCheckPredicate(gcTypes, sweepRevs, oldestRevTimeStamp);
 
-        final CountingPredicate cp1 = new CountingPredicate(name1, pred);
-        final CountingPredicate cp2 = new CountingPredicate(name2, pred);
-
-        return CloseableIterable.wrap(IterableUtils.chainedIterable(IterableUtils.filter(fit1, cp1::test), IterableUtils.filter(fit2, cp2::test)),
-                new Closeable() {
-                    @Override
-                    public void close() throws IOException {
-                        Utils.closeIfCloseable(fit1);
-                        Utils.closeIfCloseable(fit2);
-                        if (LOG.isDebugEnabled()) {
-                            String stats1 = cp1.getStats();
-                            String stats2 = cp2.getStats();
-                            String message = "";
-                            if (!stats1.isEmpty()) {
-                                message = stats1;
-                            }
-                            if (!stats2.isEmpty()) {
-                                if (!message.isEmpty()) {
-                                    message += ", ";
-                                }
-                                message += stats2;
-                            }
+        final CountingPredicate cp1 = new CountingPredicate<>(name1, pred);
+        final CountingPredicate cp2 = new CountingPredicate<>(name2, pred);
+
+        return CloseableIterable.wrap(IterableUtils.chainedIterable(IterableUtils.filter(fit1, cp1), IterableUtils.filter(fit2, cp2)),
+                () -> {
+                    Utils.closeIfCloseable(fit1);
+                    Utils.closeIfCloseable(fit2);
+                    if (LOG.isDebugEnabled()) {
+                        String stats1 = cp1.getStats();
+                        String stats2 = cp2.getStats();
+                        String message = "";
+                        if (!stats1.isEmpty()) {
+                            message = stats1;
+                        }
+                        if (!stats2.isEmpty()) {
                             if (!message.isEmpty()) {
-                                LOG.debug(message);
+                                message += ", ";
                             }
+                            message += stats2;
+                        }
+                        if (!message.isEmpty()) {
+                            LOG.debug(message);
                         }
                     }
                 });
@@ -239,39 +234,34 @@ public long getDeletedOnceCount() {
     }
 
     private Iterable getIterator(final List excludeKeyPatterns, final List conditions) {
-        return new Iterable() {
+        return () -> new AbstractIterator<>() {
+
+            private static final int BATCH_SIZE = 100;
+            private String startId = NodeDocument.MIN_ID_VALUE;
+            private Iterator batch = nextBatch();
+
             @Override
-            public Iterator iterator() {
-                return new AbstractIterator() {
-
-                    private static final int BATCH_SIZE = 100;
-                    private String startId = NodeDocument.MIN_ID_VALUE;
-                    private Iterator batch = nextBatch();
-
-                    @Override
-                    protected NodeDocument computeNext() {
-                        // read next batch if necessary
-                        if (!batch.hasNext()) {
-                            batch = nextBatch();
-                        }
-
-                        NodeDocument doc;
-                        if (batch.hasNext()) {
-                            doc = batch.next();
-                            // remember current id
-                            startId = doc.getId();
-                        } else {
-                            doc = endOfData();
-                        }
-                        return doc;
-                    }
+            protected NodeDocument computeNext() {
+                // read next batch if necessary
+                if (!batch.hasNext()) {
+                    batch = nextBatch();
+                }
 
-                    private Iterator nextBatch() {
-                        List result = store.query(Collection.NODES, startId, NodeDocument.MAX_ID_VALUE,
-                                excludeKeyPatterns, conditions, BATCH_SIZE);
-                        return result.iterator();
-                    }
-                };
+                NodeDocument doc;
+                if (batch.hasNext()) {
+                    doc = batch.next();
+                    // remember current id
+                    startId = doc.getId();
+                } else {
+                    doc = endOfData();
+                }
+                return doc;
+            }
+
+            private Iterator nextBatch() {
+                List result = store.query(Collection.NODES, startId, NodeDocument.MAX_ID_VALUE,
+                        excludeKeyPatterns, conditions, BATCH_SIZE);
+                return result.iterator();
             }
         };
     }
diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java
index 9c9c3b05ffa..9689489a011 100755
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java
@@ -16,10 +16,6 @@
  */
 
 /**
- * Implementations of {@link DocumentStore} and {@link BlobStore} for relational databases.
+ * Implementations of {@link org.apache.jackrabbit.oak.plugins.document.DocumentStore} and {@link org.apache.jackrabbit.oak.spi.blob.BlobStore} for relational databases.
  */
 package org.apache.jackrabbit.oak.plugins.document.rdb;
-
-import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
-import org.apache.jackrabbit.oak.spi.blob.BlobStore;
-
diff --git a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBCTest.java b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBCTest.java
index 985bd763ffe..14248f53be7 100755
--- a/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBCTest.java
+++ b/oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBCTest.java
@@ -53,6 +53,7 @@
 import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.QueryCondition;
 import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.RDBTableMetaData;
 import org.apache.jackrabbit.oak.plugins.document.util.Utils;
+import org.jetbrains.annotations.NotNull;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
@@ -289,13 +290,13 @@ public MyConnectionHandler(DataSource ds) {
         }
 
         @Override
-        public Connection getROConnection() throws SQLException {
+        public @NotNull Connection getROConnection() throws SQLException {
             cnt.incrementAndGet();
             return super.getROConnection();
         }
 
         @Override
-        public Connection getRWConnection() throws SQLException {
+        public @NotNull Connection getRWConnection() throws SQLException {
             throw new RuntimeException();
         }
 
From 42818c0f29fc925a267e9a43afcb8afd4ce33bf2 Mon Sep 17 00:00:00 2001
From: Julian Reschke
Date: Mon, 6 Oct 2025 15:28:50 +0100
Subject: [PATCH 2/2] OAK-11967: RDBDocumentStore: code cleanup

---
 .../oak/plugins/document/rdb/RDBDocumentStoreJDBC.java | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
index 760228a5b64..c3c12ab4473 100644
--- a/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
+++ b/oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStoreJDBC.java
@@ -159,7 +159,8 @@ public int delete(Connection connection, RDBTableMetaData tmd, List allI
 
     public int delete(Connection connection, RDBTableMetaData tmd, Map toDelete)
             throws SQLException {
-        try (PreparedStatement stmt = connection.prepareStatement("delete from " + tmd.getName() + " where ID=? and MODIFIED=?")) {
+        String statement = "delete from " + tmd.getName() + " where ID=? and MODIFIED=?";
+        try (PreparedStatement stmt = connection.prepareStatement(statement)) {
             for (Entry entry : toDelete.entrySet()) {
                 setIdInStatement(tmd, stmt, 1, entry.getKey());
                 stmt.setLong(2, entry.getValue());
@@ -327,9 +328,10 @@ public Set update(Connection connection, RDBTableMe
         List modCounts = LOG.isTraceEnabled() ? new ArrayList<>() : null;
 
         int[] batchResults = new int[0];
-        try (PreparedStatement stmt = connection.prepareStatement("update " + tmd.getName()
+        String statement = "update " + tmd.getName()
                 + " set MODIFIED = ?, HASBINARY = ?, DELETEDONCE = ?, MODCOUNT = ?, CMODCOUNT = ?, DSIZE = ?, DATA = ?, "
-                + (tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "") + "BDATA = ? where ID = ? and MODCOUNT = ?")) {
+                + (tmd.hasVersion() ? (" VERSION = " + SCHEMAVERSION + ", ") : "") + "BDATA = ? where ID = ? and MODCOUNT = ?";
+        try (PreparedStatement stmt = connection.prepareStatement(statement)) {
             boolean batchIsEmpty = true;
             for (T document : sortDocuments(documents)) {
                 Long modcount = (Long) document.get(MODCOUNT);