This repository was archived by the owner on Jun 23, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 10
First support for secondary metadata query. #114
Open
aptls
wants to merge
4
commits into
master
Choose a base branch
from
aptls-metadata
base: master
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
Show all changes
4 commits
Select commit
Hold shift + click to select a range
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -128,6 +128,8 @@ private static boolean isNullOrEmptyString(String str) { | |
| private String everyDocIdSql; | ||
| private String singleDocContentSql; | ||
| private MetadataColumns metadataColumns; | ||
| private String extraMetadataSql; | ||
| private MetadataColumns extraMetadataColumns; | ||
| private ResponseGenerator respGenerator; | ||
| private String aclSql; | ||
| private String aclPrincipalDelimiter; | ||
|
|
@@ -150,6 +152,9 @@ public void initConfig(Config config) { | |
| // when set to true, if "db.metadataColumns" is blank, it will use all | ||
| // returned columns as metadata. | ||
| config.addKey("db.includeAllColumnsAsMetadata", "false"); | ||
| config.addKey("db.extraMetadataSql", ""); | ||
| config.addKey("db.extraMetadataSqlParameters", ""); | ||
| config.addKey("db.extraMetadataColumns", ""); | ||
| config.addKey("db.modeOfOperation", null); | ||
| config.addKey("db.updateSql", ""); | ||
| config.addKey("db.aclSql", ""); | ||
|
|
@@ -234,6 +239,27 @@ public void init(AdaptorContext context) throws Exception { | |
| } | ||
| } | ||
|
|
||
| if (!cfg.getValue("db.extraMetadataSql").isEmpty()) { | ||
| extraMetadataSql = cfg.getValue("db.extraMetadataSql"); | ||
| log.config("extra metadata sql: " + extraMetadataSql); | ||
| String extraMetadataColumnsConfig = | ||
| cfg.getValue("db.extraMetadataColumns"); | ||
| if (extraMetadataColumnsConfig.isEmpty()) { | ||
| extraMetadataColumns = null; | ||
| log.config("extra metadata columns: Use all columns in ResultSet"); | ||
| } else { | ||
| extraMetadataColumns = new MetadataColumns(extraMetadataColumnsConfig); | ||
| log.config("extra metadata columns: " + extraMetadataColumns); | ||
| } | ||
| } else { | ||
| if (!cfg.getValue("db.extraMetadataColumns").isEmpty()) { | ||
| ignored.add("db.extraMetadataColumns"); | ||
| } | ||
| if (!cfg.getValue("db.extraMetadataSqlParameters").isEmpty()) { | ||
| ignored.add("db.extraMetadataSqlParameters"); | ||
| } | ||
| } | ||
|
|
||
| modeOfOperation = cfg.getValue("db.modeOfOperation"); | ||
| log.config("mode of operation: " + modeOfOperation); | ||
|
|
||
|
|
@@ -258,6 +284,15 @@ public void init(AdaptorContext context) throws Exception { | |
| } else if (!metadataColumns.isEmpty()) { | ||
| ignored.add("db.metadataColumns"); | ||
| } | ||
| if (!cfg.getValue("db.extraMetadataSql").isEmpty()) { | ||
| ignored.add("db.extraMetadataSql"); | ||
| } | ||
| if (!cfg.getValue("db.extraMetadataColumns").isEmpty()) { | ||
| ignored.add("db.extraMetadataColumns"); | ||
| } | ||
| if (!cfg.getValue("db.extraMetadataSqlParameters").isEmpty()) { | ||
| ignored.add("db.extraMetadataSqlParameters"); | ||
| } | ||
| } | ||
| } else if (singleDocContentSql.isEmpty()) { | ||
| throw new InvalidConfigurationException( | ||
|
|
@@ -311,7 +346,9 @@ public void init(AdaptorContext context) throws Exception { | |
| = new UniqueKey.Builder(cfg.getValue("db.uniqueKey")) | ||
| .setDocIdIsUrl(docIdIsUrl) | ||
| .setContentSqlColumns(cfg.getValue("db.singleDocContentSqlParameters")) | ||
| .setAclSqlColumns(cfg.getValue("db.aclSqlParameters")); | ||
| .setAclSqlColumns(cfg.getValue("db.aclSqlParameters")) | ||
| .setMetadataSqlColumns( | ||
| cfg.getValue("db.extraMetadataSqlParameters")); | ||
|
|
||
| // Verify all column names. | ||
| try (Connection conn = makeNewConnection()) { | ||
|
|
@@ -332,6 +369,10 @@ public void init(AdaptorContext context) throws Exception { | |
| "db.metadataColumns", metadataColumns.keySet()); | ||
| } | ||
| } | ||
| if (extraMetadataColumns != null) { | ||
| verifyColumnNames(conn, "db.extraMetadataSql", extraMetadataSql, | ||
| "db.extraMetadataColumns", extraMetadataColumns.keySet()); | ||
| } | ||
| if (respGenerator instanceof ResponseGenerator.SingleColumnContent) { | ||
| ResponseGenerator.SingleColumnContent content = | ||
| (ResponseGenerator.SingleColumnContent) respGenerator; | ||
|
|
@@ -477,6 +518,9 @@ public void getDocIds(DocIdPusher pusher) throws IOException, | |
| builder.setDeleteFromIndex(true); | ||
| } else if ("urlAndMetadataLister".equals(modeOfOperation)) { | ||
| addMetadataToRecordBuilder(builder, rs); | ||
| if (extraMetadataSql != null) { | ||
| addExtraMetadataToRecordBuilder(builder, conn, id.getUniqueId()); | ||
| } | ||
| } | ||
| DocIdPusher.Record record = builder.build(); | ||
| log.log(Level.FINEST, "doc id: {0}", id); | ||
|
|
@@ -501,15 +545,10 @@ private interface MetadataHandler { | |
| /* | ||
| * Adds all specified metadata columns to the record or response being built. | ||
| */ | ||
| private void addMetadata(MetadataHandler meta, ResultSet rs) | ||
| throws SQLException, IOException { | ||
| private void addMetadata(MetadataHandler meta, ResultSet rs, | ||
| MetadataColumns columns) throws SQLException, IOException { | ||
| ResultSetMetaData rsMetaData = rs.getMetaData(); | ||
| synchronized (this) { | ||
| if (metadataColumns == null) { | ||
| metadataColumns = new MetadataColumns(rsMetaData); | ||
| } | ||
| } | ||
| for (Map.Entry<String, String> entry : metadataColumns.entrySet()) { | ||
| for (Map.Entry<String, String> entry : columns.entrySet()) { | ||
| int index; | ||
| try { | ||
| index = rs.findColumn(entry.getKey()); | ||
|
|
@@ -633,25 +672,73 @@ private void addMetadata(MetadataHandler meta, ResultSet rs) | |
| @VisibleForTesting | ||
| void addMetadataToRecordBuilder(final DocIdPusher.Record.Builder builder, | ||
| ResultSet rs) throws SQLException, IOException { | ||
| synchronized (this) { | ||
| if (metadataColumns == null) { | ||
| metadataColumns = new MetadataColumns(rs.getMetaData()); | ||
| } | ||
| } | ||
| addMetadata( | ||
| new MetadataHandler() { | ||
| @Override public void addMetadata(String k, String v) { | ||
| builder.addMetadata(k, v); | ||
| } | ||
| }, | ||
| rs); | ||
| rs, metadataColumns); | ||
| } | ||
|
|
||
| @VisibleForTesting | ||
| void addMetadataToResponse(final Response resp, ResultSet rs) | ||
| throws SQLException, IOException { | ||
| synchronized (this) { | ||
| if (metadataColumns == null) { | ||
| metadataColumns = new MetadataColumns(rs.getMetaData()); | ||
| } | ||
| } | ||
| addMetadata( | ||
| new MetadataHandler() { | ||
| @Override public void addMetadata(String k, String v) { | ||
| resp.addMetadata(k, v); | ||
| } | ||
| }, | ||
| rs); | ||
| rs, metadataColumns); | ||
| } | ||
|
|
||
| private void addExtraMetadataToRecordBuilder( | ||
| final DocIdPusher.Record.Builder builder, Connection conn, | ||
| String uniqueId) throws SQLException, IOException { | ||
| addExtraMetadata(conn, uniqueId, | ||
| new MetadataHandler() { | ||
| @Override public void addMetadata(String k, String v) { | ||
| builder.addMetadata(k, v); | ||
| } | ||
| }); | ||
| } | ||
|
|
||
| private void addExtraMetadataToResponse(final Response resp, Connection conn, | ||
| String uniqueId) throws SQLException, IOException { | ||
| addExtraMetadata(conn, uniqueId, | ||
| new MetadataHandler() { | ||
| @Override public void addMetadata(String k, String v) { | ||
| resp.addMetadata(k, v); | ||
| } | ||
| }); | ||
| } | ||
|
|
||
| private void addExtraMetadata(Connection conn, String uniqueId, | ||
| MetadataHandler handler) throws SQLException, IOException { | ||
| try (PreparedStatement stmt = prepareExtraMetadataStatement(conn, uniqueId); | ||
| ResultSet rs = stmt.executeQuery()) { | ||
| synchronized (this) { | ||
| if (extraMetadataColumns == null) { | ||
| extraMetadataColumns = new MetadataColumns(rs.getMetaData()); | ||
| } | ||
| } | ||
| while (rs.next()) { | ||
| addMetadata(handler, rs, extraMetadataColumns); | ||
| } | ||
| } catch (SQLException ex) { | ||
| throw new IOException(ex); | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This line is untested. I'm not sure how to trigger this. A flat-out invalid query would fail in validation. Maybe a type mismatch in the parameter expression: |
||
| } | ||
| } | ||
|
|
||
| /** Gives the bytes of a document referenced with id. */ | ||
|
|
@@ -675,6 +762,9 @@ public void getDocContent(Request req, Response resp) throws IOException { | |
| } | ||
| // Generate response metadata first. | ||
| addMetadataToResponse(resp, rs); | ||
| if (extraMetadataSql != null) { | ||
| addExtraMetadataToResponse(resp, conn, id.getUniqueId()); | ||
| } | ||
| // Generate Acl if aclSql is provided. | ||
| if (aclSql != null) { | ||
| resp.setAcl(getAcl(conn, id.getUniqueId())); | ||
|
|
@@ -838,6 +928,14 @@ private PreparedStatement getStreamFromDb(Connection conn, | |
| return st; | ||
| } | ||
|
|
||
| private PreparedStatement prepareExtraMetadataStatement(Connection conn, | ||
| String uniqueId) throws SQLException { | ||
| PreparedStatement st = conn.prepareStatement(extraMetadataSql); | ||
| uniqueKey.setMetadataSqlValues(st, uniqueId); | ||
| log.log(Level.FINER, "about to get extra metadata: {0}", uniqueId); | ||
| return st; | ||
| } | ||
|
|
||
| /** | ||
| * Mechanism that accepts stream of DocIdPusher.Record instances, buffers | ||
| * them, and sends them when it has accumulated maximum allowed amount per | ||
|
|
@@ -921,7 +1019,7 @@ static ResponseGenerator loadResponseGenerator(Config config) { | |
| throw new InvalidConfigurationException(errmsg, ex); | ||
| } | ||
| } | ||
|
|
||
| @VisibleForTesting | ||
| static ResponseGenerator loadResponseGeneratorInternal(Method method, | ||
| Map<String, String> config) { | ||
|
|
@@ -1026,6 +1124,9 @@ public void getModifiedDocIds(DocIdPusher pusher) | |
| builder.setDeleteFromIndex(true); | ||
| } else if ("urlAndMetadataLister".equals(modeOfOperation)) { | ||
| addMetadataToRecordBuilder(builder, rs); | ||
| if (extraMetadataSql != null) { | ||
| addExtraMetadataToRecordBuilder(builder, conn, id.getUniqueId()); | ||
| } | ||
| } | ||
| DocIdPusher.Record record = builder.build(); | ||
| log.log(Level.FINEST, "doc id: {0}", id); | ||
|
|
||
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -55,15 +55,17 @@ static enum ColumnType { | |
| private final Map<String, ColumnType> types; // types of DocId columns | ||
| private final List<String> contentSqlCols; // columns for content query | ||
| private final List<String> aclSqlCols; // columns for acl query | ||
| private final List<String> metadataSqlCols; // columns for metadata query | ||
| private final boolean docIdIsUrl; | ||
|
|
||
| private UniqueKey(List<String> docIdSqlCols, Map<String, ColumnType> types, | ||
| List<String> contentSqlCols, List<String> aclSqlCols, | ||
| boolean docIdIsUrl) { | ||
| List<String> metadataSqlCols, boolean docIdIsUrl) { | ||
| this.docIdSqlCols = docIdSqlCols; | ||
| this.types = types; | ||
| this.contentSqlCols = contentSqlCols; | ||
| this.aclSqlCols = aclSqlCols; | ||
| this.metadataSqlCols = metadataSqlCols; | ||
| this.docIdIsUrl = docIdIsUrl; | ||
| } | ||
|
|
||
|
|
@@ -114,18 +116,34 @@ String makeUniqueId(ResultSet rs) throws SQLException, URISyntaxException { | |
|
|
||
| private void setSqlValues(PreparedStatement st, String uniqueId, | ||
| List<String> sqlCols) throws SQLException { | ||
| // parse on / that isn't preceded by escape char _ | ||
| // (a / that is preceded by _ is part of column value) | ||
| String parts[] = uniqueId.split("(?<!_)/", -1); | ||
| if (parts.length != docIdSqlCols.size()) { | ||
| String errmsg = "Wrong number of values for primary key: " | ||
| + "id: " + uniqueId + ", parts: " + Arrays.asList(parts); | ||
| throw new IllegalStateException(errmsg); | ||
| } | ||
| Map<String, String> zip = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); | ||
| for (int i = 0; i < parts.length; i++) { | ||
| String columnValue = decodeSlashInData(parts[i]); | ||
| zip.put(docIdSqlCols.get(i), columnValue); | ||
| // Parameters to SQL queries are taken from the unique id. When | ||
| // reading extraMetadata when docIdIsUrl is true, we're now | ||
| // passing the unique id to this method for the first time. Don't | ||
| // try to split the id; we know it must be a single unique string. | ||
|
|
||
| // TODO(aptls): while this may be OK in that we don't want to try | ||
| // to split the id when we know it's a URL, the underlying use | ||
| // case of using value(s) from the unique id to retrieve the extra | ||
| // metadata might not want to have to use this URL as the metadata | ||
| // key. We want to change this to be able to retrieve the metadata | ||
| // key value(s) from the doc id or content result set instead of | ||
| // the key. | ||
| if (docIdIsUrl) { | ||
| zip.put(docIdSqlCols.get(0), uniqueId); | ||
| } else { | ||
| // parse on / that isn't preceded by escape char _ | ||
| // (a / that is preceded by _ is part of column value) | ||
| String parts[] = uniqueId.split("(?<!_)/", -1); | ||
| if (parts.length != docIdSqlCols.size()) { | ||
| String errmsg = "Wrong number of values for primary key: " | ||
| + "id: " + uniqueId + ", parts: " + Arrays.asList(parts); | ||
| throw new IllegalStateException(errmsg); | ||
| } | ||
| for (int i = 0; i < parts.length; i++) { | ||
| String columnValue = decodeSlashInData(parts[i]); | ||
| zip.put(docIdSqlCols.get(i), columnValue); | ||
| } | ||
| } | ||
| for (int i = 0; i < sqlCols.size(); i++) { | ||
| String colName = sqlCols.get(i); | ||
|
|
@@ -171,6 +189,11 @@ void setAclSqlValues(PreparedStatement st, String uniqueId) | |
| setSqlValues(st, uniqueId, aclSqlCols); | ||
| } | ||
|
|
||
| void setMetadataSqlValues(PreparedStatement st, String uniqueId) | ||
| throws SQLException { | ||
| setSqlValues(st, uniqueId, metadataSqlCols); | ||
| } | ||
|
|
||
| @VisibleForTesting | ||
| static String encodeSlashInData(String data) { | ||
| if (-1 == data.indexOf('/') && -1 == data.indexOf('_')) { | ||
|
|
@@ -218,7 +241,7 @@ static String decodeSlashInData(String id) { | |
| @Override | ||
| public String toString() { | ||
| return "UniqueKey(" + docIdSqlCols + "," + types + "," + contentSqlCols + "," | ||
| + aclSqlCols + ")"; | ||
| + aclSqlCols + "," + metadataSqlCols + ")"; | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Har, OK. Either way is fine. |
||
| } | ||
|
|
||
| /** Builder to create instances of {@code UniqueKey}. */ | ||
|
|
@@ -227,6 +250,7 @@ static class Builder { | |
| private Map<String, ColumnType> types; // types of DocId columns | ||
| private List<String> contentSqlCols; // columns for content query | ||
| private List<String> aclSqlCols; // columns for acl query | ||
| private List<String> metadataSqlCols; // columns for metadata query | ||
| private boolean docIdIsUrl = false; | ||
|
|
||
| /** | ||
|
|
@@ -282,6 +306,7 @@ static class Builder { | |
| docIdSqlCols = Collections.unmodifiableList(tmpNames); | ||
| contentSqlCols = docIdSqlCols; | ||
| aclSqlCols = docIdSqlCols; | ||
| metadataSqlCols = docIdSqlCols; | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -345,6 +370,19 @@ Builder setAclSqlColumns(String aclSqlColumns) | |
| return this; | ||
| } | ||
|
|
||
| Builder setMetadataSqlColumns(String metadataSqlColumns) | ||
| throws InvalidConfigurationException { | ||
| if (metadataSqlColumns == null) { | ||
| throw new NullPointerException(); | ||
| } | ||
| if (!metadataSqlColumns.trim().isEmpty()) { | ||
| this.metadataSqlCols = | ||
| splitIntoNameList("db.extraMetadataSqlParameters", | ||
| metadataSqlColumns, types.keySet()); | ||
| } | ||
| return this; | ||
| } | ||
|
|
||
| private static List<String> splitIntoNameList(String paramConfig, | ||
| String cols, Set<String> validNames) { | ||
| List<String> columnNames = new ArrayList<String>(); | ||
|
|
@@ -434,6 +472,10 @@ List<String> getAclSqlColumns() { | |
| return aclSqlCols; | ||
| } | ||
|
|
||
| List<String> getMetadataSqlColumns() { | ||
| return metadataSqlCols; | ||
| } | ||
|
|
||
| /** Return the Map of column types. */ | ||
| @VisibleForTesting | ||
| Map<String, ColumnType> getColumnTypes() { | ||
|
|
@@ -464,7 +506,7 @@ UniqueKey build() throws InvalidConfigurationException { | |
| + " The key must be a single string column when docId.isUrl=true."); | ||
| } | ||
| return new UniqueKey(docIdSqlCols, types, contentSqlCols, aclSqlCols, | ||
| docIdIsUrl); | ||
| metadataSqlCols, docIdIsUrl); | ||
| } | ||
| } | ||
| } | ||
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This line isn't tested: since you lifted this out of the helper in two places, only the other line is executed.