Till Westmann has uploaded a new change for review.
https://asterix-gerrit.ics.uci.edu/357
Change subject: whitespace fix for commit b361b06
......................................................................
whitespace fix for commit b361b06
Change-Id: I9b01a2cacb2a8efe0a4c0bb97436a9c59edb8e42
---
M
asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
M asterix-doc/src/site/markdown/feeds/tutorial.md
M
asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
M
asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
M
asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
5 files changed, 88 insertions(+), 94 deletions(-)
git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb
refs/changes/57/357/1
diff --git
a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index c34c297..6ff7632 100644
---
a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++
b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -1522,7 +1522,6 @@
}
}
-
if (ds.getDatasetType() == DatasetType.INTERNAL) {
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx,
dataverseName, datasetName, indexName);
@@ -1952,7 +1951,7 @@
MetadataLockManager.INSTANCE.createFeedEnd(dataverseName,
dataverseName + "." + feedName);
}
}
-
+
private void handleCreateFeedPolicyStatement(AqlMetadataProvider
metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx =
MetadataManager.INSTANCE.beginTransaction();
@@ -2050,7 +2049,6 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
@@ -2058,7 +2056,7 @@
MetadataLockManager.INSTANCE.dropFeedEnd(dataverseName,
dataverseName + "." + feedName);
}
}
-
+
private void handleDropFeedPolicyStatement(AqlMetadataProvider
metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
@@ -2085,7 +2083,6 @@
MetadataLockManager.INSTANCE.dropFeedPolicyEnd(dataverseName,
dataverseName + "." + policyName);
}
}
-
private void handleConnectFeedStatement(AqlMetadataProvider
metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
@@ -2130,8 +2127,8 @@
// All Metadata checks have passed. Feed connect request is valid.
//
FeedPolicyAccessor policyAccessor = new
FeedPolicyAccessor(feedPolicy.getProperties());
- Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> triple =
getFeedConnectionRequest(dataverseName, feed,
- cbfs.getDatasetName(), feedPolicy, mdTxnCtx);
+ Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> triple =
getFeedConnectionRequest(dataverseName,
+ feed, cbfs.getDatasetName(), feedPolicy, mdTxnCtx);
FeedConnectionRequest connectionRequest = triple.first;
boolean createFeedIntakeJob = triple.second;
@@ -2145,8 +2142,8 @@
metadataProvider, policyAccessor);
// adapter configuration are valid at this stage
// register the feed joints (these are auto-de-registered)
- for (IFeedJoint fj : triple.third){
- FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
+ for (IFeedJoint fj : triple.third) {
+ FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
}
runJob(hcc, pair.first, false);
IFeedAdapterFactory adapterFactory = pair.second;
@@ -2156,8 +2153,8 @@
}
eventSubscriber.assertEvent(FeedLifecycleEvent.FEED_INTAKE_STARTED);
} else {
- for (IFeedJoint fj : triple.third){
- FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
+ for (IFeedJoint fj : triple.third) {
+ FeedLifecycleListener.INSTANCE.registerFeedJoint(fj);
}
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2190,7 +2187,7 @@
}
}
}
-
+
/**
* Generates a subscription request corresponding to a connect feed
request. In addition, provides a boolean
* flag indicating if feed intake job needs to be started (source primary
feed not found to be active).
@@ -2203,8 +2200,9 @@
* @return
* @throws MetadataException
*/
- private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>>
getFeedConnectionRequest(String dataverse, Feed feed, String dataset,
- FeedPolicy feedPolicy, MetadataTransactionContext mdTxnCtx) throws
MetadataException {
+ private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>>
getFeedConnectionRequest(String dataverse,
+ Feed feed, String dataset, FeedPolicy feedPolicy,
MetadataTransactionContext mdTxnCtx)
+ throws MetadataException {
IFeedJoint sourceFeedJoint = null;
FeedConnectionRequest request = null;
List<String> functionsToApply = new ArrayList<String>();
@@ -2259,7 +2257,7 @@
sourceFeedJoint.addConnectionRequest(request);
return new Triple<FeedConnectionRequest, Boolean,
List<IFeedJoint>>(request, needIntakeJob, jointsToRegister);
}
-
+
/*
* Gets the feed joint corresponding to the feed definition. Tuples
constituting the feed are
* available at this feed joint.
@@ -2282,7 +2280,7 @@
return new FeedJointKey(sourceFeed.getFeedId(), appliedFunctions);
}
-
+
private void handleDisconnectFeedStatement(AqlMetadataProvider
metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
@@ -2335,7 +2333,7 @@
dataverseName + "." + cfs.getFeedName());
}
}
-
+
private void handleSubscribeFeedStatement(AqlMetadataProvider
metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
@@ -2367,7 +2365,6 @@
try {
-
JobSpecification alteredJobSpec =
FeedUtil.alterJobSpecificationForFeed(compiled, feedConnectionId, bfs
.getSubscriptionRequest().getPolicyParameters());
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
diff --git a/asterix-doc/src/site/markdown/feeds/tutorial.md
b/asterix-doc/src/site/markdown/feeds/tutorial.md
index 009e2b1..3a70c60 100644
--- a/asterix-doc/src/site/markdown/feeds/tutorial.md
+++ b/asterix-doc/src/site/markdown/feeds/tutorial.md
@@ -34,14 +34,14 @@
AsterixDB currently provides built-in adaptors for several popular
data sources—Twitter, CNN, and RSS feeds. AsterixDB additionally
provides a generic socket-based adaptor that can be used
-to ingest data that is directed at a prescribed socket.
+to ingest data that is directed at a prescribed socket.
In this tutorial, we shall describe building two example data ingestion
pipelines that cover the popular scenario of ingesting data from (a) Twitter
and (b) RSS Feed source.
-####Ingesting Twitter Stream
+####Ingesting Twitter Stream
We shall use the built-in push-based Twitter adaptor.
-As a pre-requisite, we must define a Tweet using the AsterixDB Data Model
(ADM) and the AsterixDB Query Language (AQL). Given below are the type
definition in AQL that create a Tweet datatype which is representative of a
real tweet as obtained from Twitter.
+As a pre-requisite, we must define a Tweet using the AsterixDB Data Model
(ADM) and the AsterixDB Query Language (AQL). Given below are the type
definition in AQL that create a Tweet datatype which is representative of a
real tweet as obtained from Twitter.
create dataverse feeds;
use dataverse feeds;
@@ -66,12 +66,12 @@
create dataset Tweets (Tweet)
primary key id;
-We also create a dataset that we shall use to persist the tweets in AsterixDB.
-Next we make use of the create feed AQL statement to define our example data
feed.
+We also create a dataset that we shall use to persist the tweets in AsterixDB.
+Next we make use of the create feed AQL statement to define our example data
feed.
#####Using the "push_twitter" feed adapter#####
The push_twitter adaptor requires setting up an application account with
Twitter. To retrieve
-tweets, Twitter requires registering an application with Twitter. Registration
involves providing a name and a brief description for the application. Each
application has an associated OAuth authentication credential that includes
OAuth keys and tokens. Accessing the
+tweets, Twitter requires registering an application with Twitter. Registration
involves providing a name and a brief description for the application. Each
application has an associated OAuth authentication credential that includes
OAuth keys and tokens. Accessing the
Twitter API requires providing the following.
1. Consumer Key (API Key)
2. Consumer Secret (API Secret)
@@ -79,31 +79,31 @@
4. Access Token Secret
-The "push_twitter" adaptor takes as configuration the above mentioned
parameters. End-user(s) are required to obtain the above authentication
credentials prior to using the "push_twitter" adaptor. For further information
on obtaining OAuth keys and tokens and registering an application with Twitter,
please visit http://apps.twitter.com
+The "push_twitter" adaptor takes as configuration the above mentioned
parameters. End-user(s) are required to obtain the above authentication
credentials prior to using the "push_twitter" adaptor. For further information
on obtaining OAuth keys and tokens and registering an application with Twitter,
please visit http://apps.twitter.com
-Given below is an example AQL statement that creates a feed - TwitterFeed by
using the
-"push_twitter" adaptor.
+Given below is an example AQL statement that creates a feed - TwitterFeed by
using the
+"push_twitter" adaptor.
create feed TwitterFeed if not exists using "push_twitter"
(("type-name"="Tweet"),
- ("consumer.key"="************"),
+ ("consumer.key"="************"),
("consumer.secret"="**************"),
- ("access.token"="**********"),
+ ("access.token"="**********"),
("access.token.secret"="*************"));
-It is required that the above authentication parameters are provided valid
values.
+It is required that the above authentication parameters are provided valid
values.
Note that the create feed statement does not initiate the flow of data from
Twitter into our AsterixDB instance. Instead, the create feed statement only
results in registering the feed with AsterixDB. The flow of data along a feed
is initiated when it is connected
to a target dataset using the connect feed statement (which we shall revisit
later).
####Ingesting an RSS Feed
-RSS (Rich Site Summary); originally RDF Site Summary; often called Really
Simple Syndication, uses a family of standard web feed formats to publish
frequently updated information: blog entries, news headlines, audio, video. An
RSS document (called "feed", "web feed", or "channel") includes full or
summarized text, and metadata, like publishing date and author's name. RSS
feeds enable publishers to syndicate data automatically.
+RSS (Rich Site Summary); originally RDF Site Summary; often called Really
Simple Syndication, uses a family of standard web feed formats to publish
frequently updated information: blog entries, news headlines, audio, video. An
RSS document (called "feed", "web feed", or "channel") includes full or
summarized text, and metadata, like publishing date and author's name. RSS
feeds enable publishers to syndicate data automatically.
#####Using the "rss_feed" feed adapter#####
-AsterixDB provides a built-in feed adaptor that allows retrieving data given a
collection of RSS end point URLs. As observed in the case of ingesting tweets,
it is required to model an RSS data item using AQL.
+AsterixDB provides a built-in feed adaptor that allows retrieving data given a
collection of RSS end point URLs. As observed in the case of ingesting tweets,
it is required to model an RSS data item using AQL.
create dataverse feeds if not exists;
use dataverse feeds;
@@ -116,23 +116,23 @@
};
create dataset RssDataset (Rss)
- primary key id;
+ primary key id;
-Next, we define an RSS feed using our built-in adaptor "rss_feed".
+Next, we define an RSS feed using our built-in adaptor "rss_feed".
- create feed my_feed using
+ create feed my_feed using
rss_feed (
("type-name"="Rss"),
("url"="http://rss.cnn.com/rss/edition.rss")
);
-In the above definition, the configuration parameter "url" can be a comma
separated list that reflects a collection of RSS URLs, where each URL
corresponds to an RSS endpoint or a RSS feed.
-The "rss_adaptor" retrieves data from each of the specified RSS URLs (comma
separated values) in parallel.
+In the above definition, the configuration parameter "url" can be a comma
separated list that reflects a collection of RSS URLs, where each URL
corresponds to an RSS endpoint or an RSS feed.
+The "rss_adaptor" retrieves data from each of the specified RSS URLs (comma
separated values) in parallel.
-So far, we have discussed the mechanism for retrieving data from the external
world into the AsterixDB system. However, the arriving data may require certain
pre-processing prior to being persisted in AsterixDB storage. Next, we discuss
how the arriving data can be pre-processed.
-
+So far, we have discussed the mechanism for retrieving data from the external
world into the AsterixDB system. However, the arriving data may require certain
pre-processing prior to being persisted in AsterixDB storage. Next, we discuss
how the arriving data can be pre-processed.
+
## <a id="PreprocessingCollectedData">Preprocessing Collected Data</a> ###
@@ -154,7 +154,7 @@
reason about an AQL UDF and involve the use of indexes during
its invocation.
-We consider an example transformation of a raw tweet into its lightweight
version - ProcessedTweet - which is defined next.
+We consider an example transformation of a raw tweet into its lightweight
version - ProcessedTweet - which is defined next.
create type ProcessedTweet if not exists as open {
id: string,
@@ -165,11 +165,11 @@
country: string,
topics: [string]
};
-
-
+
+
The processing required in transforming a collected tweet to its lighter
version (of type ProcessedTweet) involves extracting the topics or hash-tags
(if any) in a tweet
and collecting them in the referred-topics attribute for the tweet.
-Additionally, the latitude and longitude values (doubles) are combined into
the spatial point type. Note that spatial data types are considered as first
class citizens that come with the support for creating indexes. Next we show a
revised version of our example TwitterFeed that involves the use of a UDF. We
assume that the UDF that contains the transformation logic into a
ProcessedTweet is avaialable as a Java UDF inside an AsterixDB library named
'testlib'. We defer the writing of a Java UDF and its installation as part of
an AsterixDB library to a later section of this document.
+Additionally, the latitude and longitude values (doubles) are combined into
the spatial point type. Note that spatial data types are considered as first
class citizens that come with the support for creating indexes. Next we show a
revised version of our example TwitterFeed that involves the use of a UDF. We
assume that the UDF that contains the transformation logic into a
ProcessedTweet is available as a Java UDF inside an AsterixDB library named
'testlib'. We defer the writing of a Java UDF and its installation as part of
an AsterixDB library to a later section of this document.
create feed ProcessedTwitterFeed if not exists
using "push_twitter"
@@ -201,12 +201,12 @@
have an associated UDF to allow for any subsequent processing,
can be persisted into a dataset, and/or can be made to derive other
secondary feeds to form a cascade network. A primary feed and a
-dependent secondary feed form a hierarchy. As an example, we next show an
+dependent secondary feed form a hierarchy. As an example, we next show an
example AQL statement that redefines the previous feed—
ProcessedTwitterFeed in terms of their
respective parent feed (TwitterFeed).
- create secondary feed ProcessedTwitterFeed from feed TwitterFeed
+ create secondary feed ProcessedTwitterFeed from feed TwitterFeed
apply function testlib#addFeatures;
@@ -242,7 +242,7 @@
to do so, the end user makes another use of the connect feed statement.
A logical view of the continuous flow of data established by
connecting the feeds to their respective target datasets is shown in
-Figure 8.
+Figure 8.
The flow of data from a feed into a dataset can be terminated
explicitly by use of the disconnect feed statement.
@@ -278,20 +278,20 @@
-####Policy Parameters
+####Policy Parameters
- *excess.records.spill*: Set to true if records that cannot be processed by
an operator for lack of resources (referred to as excess records hereafter)
should be persisted to the local disk for deferred processing. (Default: false)
-- *excess.records.discard*: Set to true if excess records should be discarded.
(Default: false)
+- *excess.records.discard*: Set to true if excess records should be discarded.
(Default: false)
-- *excess.records.throttle*: Set to true if rate of arrival of records is
required to be reduced in an adaptive manner to prevent having any excess
records (Default: false)
+- *excess.records.throttle*: Set to true if rate of arrival of records is
required to be reduced in an adaptive manner to prevent having any excess
records (Default: false)
-- *excess.records.elastic*: Set to true if the system should attempt to
resolve resource bottlenecks by re-structuring and/or rescheduling the feed
ingestion pipeline. (Default: false)
+- *excess.records.elastic*: Set to true if the system should attempt to
resolve resource bottlenecks by re-structuring and/or rescheduling the feed
ingestion pipeline. (Default: false)
-- *recover.soft.failure*: Set to true if the feed must attempt to survive any
runtime exception. A false value permits an early termination of a feed in such
an event. (Default: true)
+- *recover.soft.failure*: Set to true if the feed must attempt to survive any
runtime exception. A false value permits an early termination of a feed in such
an event. (Default: true)
-- *recover.soft.failure*: Set to true if the feed must attempt to survive a
hardware failures (loss of AsterixDB node(s)). A false value permits the early
termination of a feed in the event of a hardware failure (Default: false)
+- *recover.hard.failure*: Set to true if the feed must attempt to survive a
hardware failure (loss of AsterixDB node(s)). A false value permits the early
termination of a feed in the event of a hardware failure (Default: false)
Note that the end user may choose to form a custom policy. E.g.
it is possible in AsterixDB to create a custom policy that spills excess
@@ -366,7 +366,7 @@
We need to install our Java UDF so that we may use it in AQL
statements/queries. An AsterixDB library has a pre-defined structure which is
as follows.
-- jar file: A jar file that would contain the class files for your UDF source
code.
+- jar file: A jar file that would contain the class files for your UDF source
code.
- library descriptor.xml: This is a descriptor that provide meta-information
about the library.
<externalLibrary xmlns="library">
@@ -386,11 +386,11 @@
- lib: other dependency jars
-If the Java UDF requires additional dependency jars, you may add them under a
"lib" folder is required.
+If the Java UDF requires additional dependency jars, you may add them under a
"lib" folder.
We create a zip bundle that contains the jar file and the library descriptor
xml file. The zip would have the following structure.
- $ unzip -l ./tweetlib.zip
+ $ unzip -l ./tweetlib.zip
Archive: ./tweetlib.zip
Length Date Time Name
-------- ---- ---- ----
@@ -408,7 +408,7 @@
- Step 1: Stop the AsterixDB instance if it is in the ACTIVE state.
$ managix stop -n my_asterix
-
+
- Step 2: Install the library using Managix install command. Just to
illustrate, we use the help command to look up the syntax
@@ -426,7 +426,7 @@
We assume you have a library zip bundle that needs to be installed.
To install the library, use the Managix install command. An example is shown
below.
- $ managix install -n my_asterix -d feeds -l testlib -p <put the
absolute path of the library zip bundle here>
+ $ managix install -n my_asterix -d feeds -l testlib -p <put the
absolute path of the library zip bundle here>
You should see the following message:
@@ -438,7 +438,7 @@
You may now use the AsterixDB library in AQL statements and queries. To look
at the installed artifacts, you may execute the following query at the
AsterixDB web-console.
- for $x in dataset Metadata.Function
+ for $x in dataset Metadata.Function
return $x
for $x in dataset Metadata.Library
diff --git
a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
index f77efbf..778b7bf 100644
---
a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
+++
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PushBasedTwitterAdapterFactory.java
@@ -64,14 +64,14 @@
this.configuration = configuration;
TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
boolean requiredParamsSpecified = validateConfiguration(configuration);
- if(!requiredParamsSpecified){
- StringBuilder builder = new StringBuilder();
- builder.append("One or more parameters are missing from adapter
configuration\n");
- builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
- builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET +
"\n");
- builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
- builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET +
"\n");
- throw new Exception(builder.toString());
+ if (!requiredParamsSpecified) {
+ StringBuilder builder = new StringBuilder();
+ builder.append("One or more parameters are missing from adapter
configuration\n");
+ builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
+ builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET +
"\n");
+ builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
+ builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET +
"\n");
+ throw new Exception(builder.toString());
}
}
@@ -79,7 +79,6 @@
public ARecordType getAdapterOutputType() {
return outputType;
}
-
@Override
public boolean isRecordTrackingEnabled() {
@@ -92,12 +91,12 @@
}
private boolean validateConfiguration(Map<String, String> configuration) {
- String consumerKey =
configuration.get(AuthenticationConstants.OAUTH_CONSUMER_KEY);
- String consumerSecret =
configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
- String accessToken =
configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
- String tokenSecret =
configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
-
- if(consumerKey == null || consumerSecret == null || accessToken ==
null || tokenSecret == null){
+ String consumerKey =
configuration.get(AuthenticationConstants.OAUTH_CONSUMER_KEY);
+ String consumerSecret =
configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
+ String accessToken =
configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
+ String tokenSecret =
configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
+
+ if (consumerKey == null || consumerSecret == null || accessToken ==
null || tokenSecret == null) {
return false;
}
return true;
diff --git
a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
index 2737582..f9a9ccf 100644
---
a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
+++
b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/TwitterUtil.java
@@ -31,7 +31,6 @@
public class TwitterUtil {
-
private static Logger LOGGER =
Logger.getLogger(TwitterUtil.class.getName());
public static class ConfigurationConstants {
@@ -83,20 +82,20 @@
public static Twitter getTwitterService(Map<String, String> configuration)
{
ConfigurationBuilder cb = getAuthConfiguration(configuration);
TwitterFactory tf = null;
- try{
- tf = new TwitterFactory(cb.build());
- } catch (Exception e){
- if (LOGGER.isLoggable(Level.WARNING)){
- StringBuilder builder = new StringBuilder();
- builder.append("Twitter Adapter requires the following config
parameters\n");
- builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
- builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET +
"\n");
- builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
- builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET +
"\n");
- LOGGER.warning(builder.toString());
- LOGGER.warning("Unable to configure Twitter adapter due to
incomplete/incorrect authentication credentials");
- LOGGER.warning("For details on how to obtain OAuth authentication
token, visit
https://dev.twitter.com/oauth/overview/application-owner-access-tokens");
- }
+ try {
+ tf = new TwitterFactory(cb.build());
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ StringBuilder builder = new StringBuilder();
+ builder.append("Twitter Adapter requires the following config
parameters\n");
+ builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY +
"\n");
+ builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET +
"\n");
+ builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN +
"\n");
+
builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
+ LOGGER.warning(builder.toString());
+ LOGGER.warning("Unable to configure Twitter adapter due to
incomplete/incorrect authentication credentials");
+ LOGGER.warning("For details on how to obtain OAuth
authentication token, visit
https://dev.twitter.com/oauth/overview/application-owner-access-tokens");
+ }
}
Twitter twitter = tf.getInstance();
return twitter;
@@ -152,9 +151,9 @@
break;
}
} catch (Exception e) {
- if(LOGGER.isLoggable(Level.WARNING)){
- LOGGER.warning("unable to load authentication credentials from
auth.properties file" +
- "credential information will be obtained from adapter's
configuration");
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("unable to load authentication credentials from
auth.properties file"
+ + "credential information will be obtained from
adapter's configuration");
}
}
}
diff --git
a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 62688de..9800ee6 100644
---
a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++
b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -123,9 +123,8 @@
MetadataPrimaryIndexes.INDEX_DATASET,
MetadataPrimaryIndexes.NODE_DATASET,
MetadataPrimaryIndexes.NODEGROUP_DATASET,
MetadataPrimaryIndexes.FUNCTION_DATASET,
MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET,
MetadataPrimaryIndexes.FEED_DATASET,
- MetadataPrimaryIndexes.FEED_POLICY_DATASET,
- MetadataPrimaryIndexes.LIBRARY_DATASET,
MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET,
- MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET };
+ MetadataPrimaryIndexes.FEED_POLICY_DATASET,
MetadataPrimaryIndexes.LIBRARY_DATASET,
+ MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET,
MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET };
secondaryIndexes = new IMetadataIndex[] {
MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX,
MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX,
--
To view, visit https://asterix-gerrit.ics.uci.edu/357
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I9b01a2cacb2a8efe0a4c0bb97436a9c59edb8e42
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: Till Westmann <[email protected]>