Repository: metamodel Updated Branches: refs/heads/master 01df1a825 -> 90f111890
METAMODEL-203: Fixed. Fixes #64. Project: http://git-wip-us.apache.org/repos/asf/metamodel/repo Commit: http://git-wip-us.apache.org/repos/asf/metamodel/commit/90f11189 Tree: http://git-wip-us.apache.org/repos/asf/metamodel/tree/90f11189 Diff: http://git-wip-us.apache.org/repos/asf/metamodel/diff/90f11189 Branch: refs/heads/master Commit: 90f111890246d16b589c38f42171b0bb969983e6 Parents: 01df1a8 Author: Kasper Sørensen <i.am.kasper.soren...@gmail.com> Authored: Mon Oct 26 11:14:25 2015 +0100 Committer: Kasper Sørensen <i.am.kasper.soren...@gmail.com> Committed: Mon Oct 26 11:14:25 2015 +0100 ---------------------------------------------------------------------- CHANGES.md | 1 + .../apache/metamodel/DataContextFactory.java | 41 +++++++++++++------- mongodb/pom.xml | 2 +- .../mongodb/MongoDbUpdateCallback.java | 3 +- .../mongodb/MongoDbDataContextTest.java | 16 ++++---- 5 files changed, 39 insertions(+), 24 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/metamodel/blob/90f11189/CHANGES.md ---------------------------------------------------------------------- diff --git a/CHANGES.md b/CHANGES.md index 433bd72..83667ee 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,6 +5,7 @@ * [METAMODEL-144] - Automated binary packaging of the MetaModel project. * [METAMODEL-197] - ElasticSearch schema update/change after CREATE TABLE statements. * [METAMODEL-199] - Fixed a bug in query parser when parsing two consecutive WHERE items with parentheses around them. + * [METAMODEL-203] - Upgraded MongoDB dependency version and API to the 3.x line. 
### Apache MetaModel 4.4.0 http://git-wip-us.apache.org/repos/asf/metamodel/blob/90f11189/full/src/main/java/org/apache/metamodel/DataContextFactory.java ---------------------------------------------------------------------- diff --git a/full/src/main/java/org/apache/metamodel/DataContextFactory.java b/full/src/main/java/org/apache/metamodel/DataContextFactory.java index c7f7409..07ff989 100644 --- a/full/src/main/java/org/apache/metamodel/DataContextFactory.java +++ b/full/src/main/java/org/apache/metamodel/DataContextFactory.java @@ -22,16 +22,16 @@ import java.io.File; import java.io.InputStream; import java.net.URL; import java.sql.Connection; +import java.util.Arrays; import java.util.Collection; import javax.sql.DataSource; import org.apache.metamodel.cassandra.CassandraDataContext; -import org.apache.metamodel.elasticsearch.ElasticSearchDataContext; -import org.ektorp.http.StdHttpClient.Builder; import org.apache.metamodel.couchdb.CouchDbDataContext; import org.apache.metamodel.csv.CsvConfiguration; import org.apache.metamodel.csv.CsvDataContext; +import org.apache.metamodel.elasticsearch.ElasticSearchDataContext; import org.apache.metamodel.excel.ExcelConfiguration; import org.apache.metamodel.excel.ExcelDataContext; import org.apache.metamodel.fixedwidth.FixedWidthConfiguration; @@ -46,12 +46,16 @@ import org.apache.metamodel.sugarcrm.SugarCrmDataContext; import org.apache.metamodel.util.FileHelper; import org.apache.metamodel.util.SimpleTableDef; import org.apache.metamodel.xml.XmlDomDataContext; +import org.ektorp.http.StdHttpClient.Builder; import org.elasticsearch.client.Client; import org.xml.sax.InputSource; import com.datastax.driver.core.Cluster; +import com.google.common.base.Strings; import com.mongodb.DB; -import com.mongodb.Mongo; +import com.mongodb.MongoClient; +import com.mongodb.MongoCredential; +import com.mongodb.ServerAddress; /** * A factory for DataContext objects. 
This class substantially eases the task @@ -346,7 +350,8 @@ public class DataContextFactory { * table-based model * @return a DataContext object that matches the request */ - public static DataContext createXmlDataContext(InputSource inputSource, String schemaName, boolean autoFlattenTables) { + public static DataContext createXmlDataContext(InputSource inputSource, String schemaName, + boolean autoFlattenTables) { XmlDomDataContext dc = new XmlDomDataContext(inputSource, schemaName, autoFlattenTables); return dc; } @@ -496,7 +501,8 @@ public class DataContextFactory { * the types of tables to include in the generated schemas * @return a DataContext object that matches the request */ - public static UpdateableDataContext createJdbcDataContext(DataSource ds, String catalogName, TableType[] tableTypes) { + public static UpdateableDataContext createJdbcDataContext(DataSource ds, String catalogName, + TableType[] tableTypes) { return new JdbcDataContext(ds, tableTypes, catalogName); } @@ -535,14 +541,19 @@ public class DataContextFactory { public static UpdateableDataContext createMongoDbDataContext(String hostname, Integer port, String databaseName, String username, char[] password, SimpleTableDef[] tableDefs) { try { - DB mongoDb; + final ServerAddress serverAddress; if (port == null) { - mongoDb = new Mongo(hostname).getDB(databaseName); + serverAddress = new ServerAddress(hostname); } else { - mongoDb = new Mongo(hostname, port).getDB(databaseName); + serverAddress = new ServerAddress(hostname, port); } - if (username != null) { - mongoDb.authenticate(username, password); + + final DB mongoDb; + if (Strings.isNullOrEmpty(username)) { + mongoDb = new MongoClient(serverAddress).getDB(databaseName); + } else { + final MongoCredential credential = MongoCredential.createCredential(username, databaseName, password); + mongoDb = new MongoClient(serverAddress, Arrays.asList(credential)).getDB(databaseName); } if (tableDefs == null || tableDefs.length == 0) { @@ -642,10 
+653,11 @@ public class DataContextFactory { /** * Creates a new ElasticSearch datacontext. + * * @param client - * The ElasticSearch client + * The ElasticSearch client * @param indexName - * The ElasticSearch index name + * The ElasticSearch index name * @return a DataContext object that matches the request */ public static UpdateableDataContext createElasticSearchDataContext(Client client, String indexName) { @@ -654,10 +666,11 @@ public class DataContextFactory { /** * Creates a new Cassandra datacontext. + * * @param cluster - * The Cassandra client + * The Cassandra client * @param keySpaceName - * The Cassandra key space name + * The Cassandra key space name * @return a DataContext object that matches the request */ public static DataContext createCassandraDataContext(Cluster cluster, String keySpaceName) { http://git-wip-us.apache.org/repos/asf/metamodel/blob/90f11189/mongodb/pom.xml ---------------------------------------------------------------------- diff --git a/mongodb/pom.xml b/mongodb/pom.xml index 6b80df4..d8dad70 100644 --- a/mongodb/pom.xml +++ b/mongodb/pom.xml @@ -35,7 +35,7 @@ under the License. 
<dependency> <groupId>org.mongodb</groupId> <artifactId>mongo-java-driver</artifactId> - <version>2.9.1</version> + <version>3.1.0</version> </dependency> <!-- Test dependencies --> http://git-wip-us.apache.org/repos/asf/metamodel/blob/90f11189/mongodb/src/main/java/org/apache/metamodel/mongodb/MongoDbUpdateCallback.java ---------------------------------------------------------------------- diff --git a/mongodb/src/main/java/org/apache/metamodel/mongodb/MongoDbUpdateCallback.java b/mongodb/src/main/java/org/apache/metamodel/mongodb/MongoDbUpdateCallback.java index 16d74a4..0558898 100644 --- a/mongodb/src/main/java/org/apache/metamodel/mongodb/MongoDbUpdateCallback.java +++ b/mongodb/src/main/java/org/apache/metamodel/mongodb/MongoDbUpdateCallback.java @@ -31,6 +31,7 @@ import org.apache.metamodel.insert.RowInsertionBuilder; import org.apache.metamodel.schema.Schema; import org.apache.metamodel.schema.Table; +import com.mongodb.BasicDBObject; import com.mongodb.DBCollection; final class MongoDbUpdateCallback extends AbstractUpdateCallback implements UpdateCallback, Closeable { @@ -67,7 +68,7 @@ final class MongoDbUpdateCallback extends AbstractUpdateCallback implements Upda } protected void createCollection(String name) { - DBCollection collection = _dataContext.getMongoDb().createCollection(name, null); + DBCollection collection = _dataContext.getMongoDb().createCollection(name, new BasicDBObject()); _collections.put(name, collection); } http://git-wip-us.apache.org/repos/asf/metamodel/blob/90f11189/mongodb/src/test/java/org/apache/metamodel/mongodb/MongoDbDataContextTest.java ---------------------------------------------------------------------- diff --git a/mongodb/src/test/java/org/apache/metamodel/mongodb/MongoDbDataContextTest.java b/mongodb/src/test/java/org/apache/metamodel/mongodb/MongoDbDataContextTest.java index 1a4a3af..da89bb1 100644 --- a/mongodb/src/test/java/org/apache/metamodel/mongodb/MongoDbDataContextTest.java +++ 
b/mongodb/src/test/java/org/apache/metamodel/mongodb/MongoDbDataContextTest.java @@ -69,12 +69,12 @@ public class MongoDbDataContextTest extends MongoDbTestCase { return; } - DBCollection col = db.createCollection(getCollectionName(), null); + DBCollection col = db.createCollection(getCollectionName(), new BasicDBObject()); // delete if already exists { col.drop(); - col = db.createCollection(getCollectionName(), null); + col = db.createCollection(getCollectionName(), new BasicDBObject()); } final BasicDBList list = new BasicDBList(); @@ -110,12 +110,12 @@ public class MongoDbDataContextTest extends MongoDbTestCase { return; } - DBCollection col = db.createCollection(getCollectionName(), null); + DBCollection col = db.createCollection(getCollectionName(), new BasicDBObject()); // delete if already exists { col.drop(); - col = db.createCollection(getCollectionName(), null); + col = db.createCollection(getCollectionName(), new BasicDBObject()); } // create 3 records @@ -160,12 +160,12 @@ public class MongoDbDataContextTest extends MongoDbTestCase { return; } - DBCollection col = db.createCollection(getCollectionName(), null); + DBCollection col = db.createCollection(getCollectionName(), new BasicDBObject()); // delete if already exists { col.drop(); - col = db.createCollection(getCollectionName(), null); + col = db.createCollection(getCollectionName(), new BasicDBObject()); } // create 1000 records @@ -497,12 +497,12 @@ public class MongoDbDataContextTest extends MongoDbTestCase { return; } - DBCollection col = db.createCollection(getCollectionName(), null); + DBCollection col = db.createCollection(getCollectionName(), new BasicDBObject()); // delete if already exists { col.drop(); - col = db.createCollection(getCollectionName(), null); + col = db.createCollection(getCollectionName(), new BasicDBObject()); } final BasicDBObject dbRow = new BasicDBObject();