Repository: usergrid
Updated Branches:
  refs/heads/token-service-to-dstax [created] 179f12db0


Initial commit of new token module to replace Hector for access token storage.


Project: http://git-wip-us.apache.org/repos/asf/usergrid/repo
Commit: http://git-wip-us.apache.org/repos/asf/usergrid/commit/179f12db
Tree: http://git-wip-us.apache.org/repos/asf/usergrid/tree/179f12db
Diff: http://git-wip-us.apache.org/repos/asf/usergrid/diff/179f12db

Branch: refs/heads/token-service-to-dstax
Commit: 179f12db0ef55cf9c28936678a933e2de04034eb
Parents: 3f819dc
Author: Michael Russo <russomich...@google.com>
Authored: Fri Jun 16 00:15:14 2017 -0700
Committer: Michael Russo <russomich...@google.com>
Committed: Fri Jun 16 00:15:14 2017 -0700

----------------------------------------------------------------------
 stack/corepersistence/pom.xml                   |   1 +
 stack/corepersistence/token/pom.xml             |  70 +++
 .../persistence/token/guice/TokenModule.java    |  48 ++
 .../token/impl/TokenSerialization.java          |  50 ++
 .../token/impl/TokenSerializationImpl.java      | 536 +++++++++++++++++++
 .../persistence/token/TokenTestModule.java      |  40 ++
 .../token/src/test/resources/log4j.properties   |  38 ++
 7 files changed, 783 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/pom.xml
----------------------------------------------------------------------
diff --git a/stack/corepersistence/pom.xml b/stack/corepersistence/pom.xml
index 8a45323..45aad4b 100644
--- a/stack/corepersistence/pom.xml
+++ b/stack/corepersistence/pom.xml
@@ -113,6 +113,7 @@ limitations under the License.
         <module>queue</module>
         <module>cache</module>
         <module>actorsystem</module>
+        <module>token</module>
     </modules>
 
     <build>

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/pom.xml
----------------------------------------------------------------------
diff --git a/stack/corepersistence/token/pom.xml 
b/stack/corepersistence/token/pom.xml
new file mode 100644
index 0000000..fcbd73c
--- /dev/null
+++ b/stack/corepersistence/token/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0";
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+
+    <parent>
+        <artifactId>persistence</artifactId>
+        <groupId>org.apache.usergrid</groupId>
+        <version>2.2.0-SNAPSHOT</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <description>The module for handling access token persistence</description>
+
+    <artifactId>token</artifactId>
+    <name>Usergrid Token</name>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>${commons.lang.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.usergrid</groupId>
+            <artifactId>common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <!-- test deps -->
+
+        <dependency>
+            <groupId>org.apache.usergrid</groupId>
+            <artifactId>common</artifactId>
+            <version>${project.version}</version>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+
+        <!--
+        <dependency>
+            <groupId>org.apache.usergrid</groupId>
+            <artifactId>collection</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        -->
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/guice/TokenModule.java
----------------------------------------------------------------------
diff --git 
a/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/guice/TokenModule.java
 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/guice/TokenModule.java
new file mode 100644
index 0000000..13d6b43
--- /dev/null
+++ 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/guice/TokenModule.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements.  The ASF licenses this file to You
+ * under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.  For additional information regarding
+ * copyright in this work, please see the NOTICE file in the top level
+ * directory of this distribution.
+ */
+package org.apache.usergrid.persistence.token.guice;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Key;
+import com.google.inject.multibindings.Multibinder;
+import org.apache.usergrid.persistence.cache.CacheFactory;
+import org.apache.usergrid.persistence.cache.impl.CacheFactoryImpl;
+import org.apache.usergrid.persistence.cache.impl.TokenSerialization;
+import org.apache.usergrid.persistence.cache.impl.TokenSerializationImpl;
+import org.apache.usergrid.persistence.core.migration.schema.Migration;
+
+
+/**
+ * Wire up cache impl.
+ */
+public class TokenModule extends AbstractModule {
+
+    @Override
+    protected void configure() {
+
+        bind( CacheFactory.class ).to( CacheFactoryImpl.class );
+
+        bind( TokenSerialization.class ).to( TokenSerializationImpl.class );
+
+        Multibinder<Migration> migrationBinding = Multibinder.newSetBinder( 
binder(), Migration.class );
+        migrationBinding.addBinding().to(Key.get(TokenSerialization.class));
+
+    }
+}
+
+

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerialization.java
----------------------------------------------------------------------
diff --git 
a/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerialization.java
 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerialization.java
new file mode 100644
index 0000000..bf9337f
--- /dev/null
+++ 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerialization.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.usergrid.persistence.token.impl;


import org.apache.usergrid.persistence.core.migration.schema.Migration;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.UUID;


/**
 * Serialize token information to/from Cassandra. This was ported over to use a
 * newer cassandra client from the old persistence code @
 *
 *     https://github.com/apache/usergrid/tree/3f819dc0679f84edb57c52e69b58622417cfd59f
 *     org.apache.usergrid.security.tokens.cassandra.TokenServiceImpl
 *
 * Extends {@link Migration} so the implementation can contribute its table
 * definitions to schema setup.
 */
public interface TokenSerialization extends Migration {

    /**
     * Delete all stored data for the given token.
     *
     * @param tokenUUID the token's UUID, never null
     */
    void deleteToken(UUID tokenUUID);

    /**
     * Revoke the token and remove it from the owning principal's token index.
     *
     * @param tokenUUID          the token's UUID, never null
     * @param principalKeyBuffer serialized key of the principal owning the token
     */
    void revokeToken(UUID tokenUUID, ByteBuffer principalKeyBuffer);

    /**
     * Update the token's last-accessed and inactivity timestamps.
     *
     * @param tokenUUID    the token's UUID, never null
     * @param accessedTime the time the token was last accessed
     * @param inactiveTime the inactivity value to record
     */
    void updateTokenAccessTime(UUID tokenUUID, int accessedTime, int inactiveTime);

    /**
     * Load the stored token properties, keyed by column name.
     *
     * @param tokenUUID the token's UUID, never null
     * @return map of token property name to deserialized value
     */
    Map<String, Object> getTokenInfo(UUID tokenUUID);

    /**
     * Persist the given token properties.
     *
     * @param tokenUUID the token's UUID, never null
     * @param tokenInfo map of token property name to value
     */
    void putTokenInfo(UUID tokenUUID, Map<String, Object> tokenInfo);

    /**
     * List the token UUIDs associated with the given principal.
     *
     * @param principalKeyBuffer serialized key of the principal
     * @return the principal's token UUIDs, empty if none
     */
    List<UUID> getTokensForPrincipal(ByteBuffer principalKeyBuffer);

}

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerializationImpl.java
----------------------------------------------------------------------
diff --git 
a/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerializationImpl.java
 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerializationImpl.java
new file mode 100644
index 0000000..b07472d
--- /dev/null
+++ 
b/stack/corepersistence/token/src/main/java/org/apache/usergrid/persistence/token/impl/TokenSerializationImpl.java
@@ -0,0 +1,536 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.usergrid.persistence.token.impl;

import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import org.apache.usergrid.persistence.core.CassandraConfig;
import org.apache.usergrid.persistence.core.astyanax.MultiTenantColumnFamilyDefinition;
import org.apache.usergrid.persistence.core.datastax.CQLUtils;
import org.apache.usergrid.persistence.core.datastax.TableDefinition;
import org.apache.usergrid.persistence.core.datastax.impl.TableDefinitionImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.ByteBuffer;
import java.util.*;


/**
 * Cassandra (datastax/CQL) persistence for access-token data. Port of the
 * legacy Hector-based storage referenced from {@link TokenSerialization};
 * several write paths are still unimplemented stubs in this initial commit.
 */
public class TokenSerializationImpl implements TokenSerialization {

    public static final Logger logger = LoggerFactory.getLogger( TokenSerializationImpl.class );

    // SMILE (binary JSON) mapper used for the opaque token "state" payload.
    private final SmileFactory smile = new SmileFactory();
    private final ObjectMapper smileMapper = new ObjectMapper( smile );

    // Column names of the Tokens table.
    private static final String TOKEN_UUID = "uuid";
    private static final String TOKEN_TYPE = "type";
    private static final String TOKEN_CREATED = "created";
    private static final String TOKEN_ACCESSED = "accessed";
    private static final String TOKEN_INACTIVE = "inactive";
    private static final String TOKEN_DURATION = "duration";
    private static final String TOKEN_PRINCIPAL_TYPE = "principal";
    private static final String TOKEN_ENTITY = "entity";
    private static final String TOKEN_APPLICATION = "application";
    private static final String TOKEN_STATE = "state";
    private static final String TOKEN_WORKFLOW_ORG_ID = "workflowOrgId";

    /** All columns fetched when loading token info. */
    private static final Set<String> TOKEN_PROPERTIES;

    static {
        Set<String> set = new HashSet<>();
        set.add( TOKEN_UUID );
        set.add( TOKEN_TYPE );
        set.add( TOKEN_CREATED );
        set.add( TOKEN_ACCESSED );
        set.add( TOKEN_INACTIVE );
        set.add( TOKEN_PRINCIPAL_TYPE );
        set.add( TOKEN_ENTITY );
        set.add( TOKEN_APPLICATION );
        set.add( TOKEN_STATE );
        set.add( TOKEN_DURATION );
        set.add( TOKEN_WORKFLOW_ORG_ID );
        TOKEN_PROPERTIES = Collections.unmodifiableSet( set );
    }

    /** Columns that must be present for a token record to be considered valid. */
    private static final Set<String> REQUIRED_TOKEN_PROPERTIES;

    static {
        Set<String> required = new HashSet<>();
        required.add( TOKEN_UUID );
        required.add( TOKEN_TYPE );
        required.add( TOKEN_CREATED );
        required.add( TOKEN_ACCESSED );
        required.add( TOKEN_INACTIVE );
        required.add( TOKEN_DURATION );
        REQUIRED_TOKEN_PROPERTIES = Collections.unmodifiableSet( required );
    }


    // Table of token rows: key = token UUID bytes, column1 = property name bytes.
    private static final String TOKENS_TABLE = CQLUtils.quote("Tokens");
    private static final Collection<String> TOKENS_PARTITION_KEYS = Collections.singletonList("key");
    private static final Collection<String> TOKENS_COLUMN_KEYS = Collections.singletonList("column1");
    private static final Map<String, DataType.Name> TOKENS_COLUMNS =
        new HashMap<String, DataType.Name>() {{
            put( "key", DataType.Name.BLOB );
            put( "column1", DataType.Name.BLOB );
            put( "value", DataType.Name.BLOB ); }};
    private static final Map<String, String> TOKENS_CLUSTERING_ORDER =
        new HashMap<String, String>(){{ put( "column1", "ASC" ); }};

    // Index of tokens per principal: key = principal key bytes, column1 = token UUID.
    private static final String PRINCIPAL_TOKENS_TABLE = CQLUtils.quote("PrincipalTokens");
    private static final Collection<String> PRINCIPAL_TOKENS_PARTITION_KEYS = Collections.singletonList("key");
    private static final Collection<String> PRINCIPAL_TOKENS_COLUMN_KEYS = Collections.singletonList("column1");
    private static final Map<String, DataType.Name> PRINCIPAL_TOKENS_COLUMNS =
        new HashMap<String, DataType.Name>() {{
            put( "key", DataType.Name.BLOB );
            put( "column1", DataType.Name.UUID );
            put( "value", DataType.Name.BLOB ); }};
    private static final Map<String, String> PRINCIPAL_TOKENS_CLUSTERING_ORDER =
        new HashMap<String, String>(){{ put( "column1", "ASC" ); }};


    private final Session session;
    private final CassandraConfig cassandraConfig;


    /**
     * @param session         shared CQL session, injected
     * @param cassandraConfig keyspace and consistency-level configuration
     */
    @Inject
    public TokenSerializationImpl( final Session session,
                                   final CassandraConfig cassandraConfig ) {
        this.session = session;
        this.cassandraConfig = cassandraConfig;
    }


    /** {@inheritDoc} TODO: port delete path from legacy TokenServiceImpl. */
    @Override
    public void deleteToken(UUID tokenUUID){

    }

    /** {@inheritDoc} TODO: port revoke path from legacy TokenServiceImpl. */
    @Override
    public void revokeToken(UUID tokenUUID, ByteBuffer principalKeyBuffer){

    }

    /** {@inheritDoc} TODO: port access-time update from legacy TokenServiceImpl. */
    @Override
    public void updateTokenAccessTime(UUID tokenUUID, int accessedTime, int inactiveTime ){

    }

    /**
     * {@inheritDoc}
     *
     * Reads all known token property columns for the row keyed by the token
     * UUID and deserializes each value according to its column name.
     *
     * @throws RuntimeException if a fetched column value cannot be deserialized
     */
    @Override
    public Map<String, Object> getTokenInfo(UUID tokenUUID){

        Preconditions.checkNotNull(tokenUUID, "token UUID is required");

        List<ByteBuffer> tokenProperties = new ArrayList<>();
        TOKEN_PROPERTIES.forEach( prop ->
            tokenProperties.add(DataType.serializeValue(prop, ProtocolVersion.NEWEST_SUPPORTED)));

        // Row key is the raw 16-byte token UUID (the text codec would throw
        // InvalidTypeException on a UUID; uuid() matches the legacy UUID row key —
        // TODO confirm against the legacy Hector schema).
        final ByteBuffer key = DataType.uuid().serialize(tokenUUID, ProtocolVersion.NEWEST_SUPPORTED);

        final Clause inKey = QueryBuilder.eq("key", key);
        final Clause inColumn = QueryBuilder.in("column1", tokenProperties );

        final Statement statement = QueryBuilder.select().all().from(TOKENS_TABLE)
            .where(inKey)
            .and(inColumn)
            .setConsistencyLevel(cassandraConfig.getDataStaxReadCl());

        final ResultSet resultSet = session.execute(statement);
        final List<Row> rows = resultSet.all();

        Map<String, Object> tokenInfo = new HashMap<>();

        rows.forEach( row -> {

            final String name = row.getString("column1");
            final Object value = deserializeColumnValue(name, row.getBytes("value"));

            if (value == null){
                throw new RuntimeException("error deserializing token info for property: "+name);
            }

            tokenInfo.put(name, value);

        });

        return tokenInfo;
    }

    /** {@inheritDoc} TODO: port write path from legacy TokenServiceImpl. */
    @Override
    public void putTokenInfo(UUID tokenUUID, Map<String, Object> tokenInfo){

    }

    /** {@inheritDoc} TODO: port principal index read from legacy TokenServiceImpl. */
    @Override
    public List<UUID> getTokensForPrincipal(ByteBuffer principalKeyBuffer){
        return new ArrayList<>();
    }


    /**
     * Deserialize a token column value according to which property it holds:
     * text for type/principal, bigint for the time fields, uuid for the id
     * fields, and SMILE for the opaque state payload.
     *
     * @return the deserialized value, or null for an unknown column name
     */
    private Object deserializeColumnValue(final String name, final ByteBuffer bb){

        switch (name) {
            case TOKEN_TYPE:
            case TOKEN_PRINCIPAL_TYPE:
                return DataType.text().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
            case TOKEN_CREATED:
            case TOKEN_ACCESSED:
            case TOKEN_INACTIVE:
            case TOKEN_DURATION:
                return DataType.bigint().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
            case TOKEN_ENTITY:
            case TOKEN_APPLICATION:
            case TOKEN_WORKFLOW_ORG_ID:
            case TOKEN_UUID:
                return DataType.uuid().deserialize(bb, ProtocolVersion.NEWEST_SUPPORTED);
            case TOKEN_STATE:
                // was missing 'return': the value was discarded and the caller
                // raised "error deserializing token info" for every state column
                return fromByteBuffer(bb, Object.class);
            default:
                return null;
        }
    }


    /**
     * Deserialize a SMILE-encoded payload from the buffer.
     *
     * @return the decoded object, or null when the buffer is empty or unreadable
     */
    private Object fromByteBuffer( ByteBuffer byteBuffer, Class<?> clazz ) {
        if ( ( byteBuffer == null ) || !byteBuffer.hasRemaining() ) {
            return null;
        }
        if ( clazz == null ) {
            clazz = Object.class;
        }

        Object obj = null;
        try {
            // read directly from the backing array at the buffer's current position
            obj = smileMapper.readValue( byteBuffer.array(), byteBuffer.arrayOffset() + byteBuffer.position(),
                byteBuffer.remaining(), clazz );
        }
        catch ( Exception e ) {
            logger.error( "Error parsing SMILE bytes", e );
        }
        return obj;
    }


    /** No Astyanax/Hector column families: this module is CQL-only. */
    @Override
    public Collection<MultiTenantColumnFamilyDefinition> getColumnFamilies() {

        return Collections.emptyList();
    }

    /** Table definitions contributed to schema migration: Tokens and PrincipalTokens. */
    @Override
    public Collection<TableDefinition> getTables() {

        final TableDefinition tokens =
            new TableDefinitionImpl(
                cassandraConfig.getApplicationKeyspace(),
                TOKENS_TABLE,
                TOKENS_PARTITION_KEYS,
                TOKENS_COLUMN_KEYS,
                TOKENS_COLUMNS,
                TableDefinitionImpl.CacheOption.KEYS,
                TOKENS_CLUSTERING_ORDER);

        final TableDefinition principalTokens =
            new TableDefinitionImpl(
                cassandraConfig.getApplicationKeyspace(),
                PRINCIPAL_TOKENS_TABLE,
                PRINCIPAL_TOKENS_PARTITION_KEYS,
                PRINCIPAL_TOKENS_COLUMN_KEYS,
                PRINCIPAL_TOKENS_COLUMNS,
                TableDefinitionImpl.CacheOption.KEYS,
                PRINCIPAL_TOKENS_CLUSTERING_ORDER);

        return Arrays.asList(tokens, principalTokens);
    }


    /** Alias kept for the token call sites; delegates to {@link #serializeKeys}. */
    private static ByteBuffer serializeTokenKey(UUID ownerUUID, String ownerType, int bucketNumber, String rowKeyString ){
        return serializeKeys(ownerUUID, ownerType, bucketNumber, rowKeyString);
    }

    /**
     * Build a legacy-composite row key of (ownerUUID, ownerType, bucketNumber,
     * rowKeyString). Each component is written as a 2-byte length, the
     * serialized bytes, and a 1-byte equality terminator, matching the
     * Thrift composite-key wire format.
     */
    private static ByteBuffer serializeKeys(UUID ownerUUID, String ownerType, int bucketNumber, String rowKeyString ){

        List<Object> keys = new ArrayList<>(4);
        keys.add(0, ownerUUID);
        keys.add(1, ownerType);
        keys.add(2, bucketNumber);
        keys.add(3, rowKeyString);

        // UUIDs are 16 bytes, allocate the buffer accordingly
        int size = 16 + ownerType.getBytes().length + rowKeyString.getBytes().length;

        // ints are 4 bytes, add for the bucket
        size += 4;

        // we always need to add length for the 2 byte short and 1 byte equality
        size += keys.size() * 3;

        ByteBuffer stuff = ByteBuffer.allocate(size);

        for (Object key : keys) {

            ByteBuffer kb = DataType.serializeValue(key, ProtocolVersion.NEWEST_SUPPORTED);
            if (kb == null) {
                kb = ByteBuffer.allocate(0);
            }

            stuff.putShort((short) kb.remaining());
            stuff.put(kb.slice());
            stuff.put((byte) 0);
        }
        stuff.flip();
        return stuff;
    }

}

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/src/test/java/org/apache/usergrid/persistence/token/TokenTestModule.java
----------------------------------------------------------------------
diff --git 
a/stack/corepersistence/token/src/test/java/org/apache/usergrid/persistence/token/TokenTestModule.java
 
b/stack/corepersistence/token/src/test/java/org/apache/usergrid/persistence/token/TokenTestModule.java
new file mode 100644
index 0000000..b454d73
--- /dev/null
+++ 
b/stack/corepersistence/token/src/test/java/org/apache/usergrid/persistence/token/TokenTestModule.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.usergrid.persistence.token;
+
+
+import org.apache.usergrid.persistence.token.guice.TokenModule;
+import org.apache.usergrid.persistence.token.impl.TokenSerialization;
+import org.apache.usergrid.persistence.token.impl.TokenSerializationImpl;
+import org.apache.usergrid.persistence.core.guice.CommonModule;
+
+
+
+public class TokenTestModule extends 
org.apache.usergrid.persistence.core.guice.TestModule {
+
+    @Override
+    protected void configure() {
+
+        install( new CommonModule() );
+        install( new TokenModule() );
+
+        
bind(TokenSerialization.class).to(TokenSerializationImpl.class).asEagerSingleton();
+    }
+}

http://git-wip-us.apache.org/repos/asf/usergrid/blob/179f12db/stack/corepersistence/token/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/stack/corepersistence/token/src/test/resources/log4j.properties 
b/stack/corepersistence/token/src/test/resources/log4j.properties
new file mode 100644
index 0000000..d9f6b7f
--- /dev/null
+++ b/stack/corepersistence/token/src/test/resources/log4j.properties
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# suppress inspection "UnusedProperty" for whole file
+log4j.rootLogger=INFO,stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %p 
%c{3}.%M(%L)<%t>- %m%n
+
+#log4j.logger.com.datastax.driver.core=TRACE
+log4j.logger.org.safehaus.chop.plugin=DEBUG
+log4j.logger.org.safehaus.guicyfig=ERROR
+log4j.logger.org.safehaus.chop.api.store.amazon=DEBUG
+log4j.logger.org.apache.http=ERROR
+log4j.logger.com.amazonaws.request=ERROR
+log4j.logger.cassandra.db=ERROR
+
+#log4j.logger.org.apache.usergrid=DEBUG
+#log4j.logger.org.apache.usergrid.persistence.collection=TRACE
+log4j.logger.org.apache.usergrid.persistence.collection.mvcc.stage.delete.VersionCompact=TRACE
+

Reply via email to