http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PScanResult.java ---------------------------------------------------------------------- diff --cc proxy/src/main/java/org/apache/accumulo/proxy/thrift/PScanResult.java index 0000000,0000000..4afbbbe new file mode 100644 --- /dev/null +++ b/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PScanResult.java @@@ -1,0 -1,0 +1,554 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++/** ++ * Autogenerated by Thrift Compiler (0.9.0) ++ * ++ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING ++ * @generated ++ */ ++package org.apache.accumulo.proxy.thrift; ++ ++import org.apache.thrift.scheme.IScheme; ++import org.apache.thrift.scheme.SchemeFactory; ++import org.apache.thrift.scheme.StandardScheme; ++ ++import org.apache.thrift.scheme.TupleScheme; ++import org.apache.thrift.protocol.TTupleProtocol; ++import org.apache.thrift.protocol.TProtocolException; ++import org.apache.thrift.EncodingUtils; ++import org.apache.thrift.TException; ++import java.util.List; ++import java.util.ArrayList; ++import java.util.Map; ++import java.util.HashMap; ++import java.util.EnumMap; ++import java.util.Set; ++import java.util.HashSet; ++import java.util.EnumSet; ++import java.util.Collections; ++import java.util.BitSet; ++import java.nio.ByteBuffer; ++import java.util.Arrays; ++import org.slf4j.Logger; ++import org.slf4j.LoggerFactory; ++ ++@SuppressWarnings("all") public class PScanResult implements org.apache.thrift.TBase<PScanResult, PScanResult._Fields>, java.io.Serializable, Cloneable { ++ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PScanResult"); ++ ++ private static final org.apache.thrift.protocol.TField RESULTS_FIELD_DESC = new org.apache.thrift.protocol.TField("results", org.apache.thrift.protocol.TType.LIST, (short)1); ++ private static final org.apache.thrift.protocol.TField MORE_FIELD_DESC = new org.apache.thrift.protocol.TField("more", org.apache.thrift.protocol.TType.BOOL, (short)2); ++ ++ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); ++ static { ++ schemes.put(StandardScheme.class, new PScanResultStandardSchemeFactory()); ++ schemes.put(TupleScheme.class, new PScanResultTupleSchemeFactory()); ++ } ++ ++ public List<PKeyValue> results; // required ++ public boolean more; // required ++ ++ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ ++ @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum { ++ RESULTS((short)1, "results"), ++ MORE((short)2, "more"); ++ ++ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); ++ ++ static { ++ for (_Fields field : EnumSet.allOf(_Fields.class)) { ++ byName.put(field.getFieldName(), field); ++ } ++ } ++ ++ /** ++ * Find the _Fields constant that matches fieldId, or null if its not found. ++ */ ++ public static _Fields findByThriftId(int fieldId) { ++ switch(fieldId) { ++ case 1: // RESULTS ++ return RESULTS; ++ case 2: // MORE ++ return MORE; ++ default: ++ return null; ++ } ++ } ++ ++ /** ++ * Find the _Fields constant that matches fieldId, throwing an exception ++ * if it is not found. ++ */ ++ public static _Fields findByThriftIdOrThrow(int fieldId) { ++ _Fields fields = findByThriftId(fieldId); ++ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); ++ return fields; ++ } ++ ++ /** ++ * Find the _Fields constant that matches name, or null if its not found. ++ */ ++ public static _Fields findByName(String name) { ++ return byName.get(name); ++ } ++ ++ private final short _thriftId; ++ private final String _fieldName; ++ ++ _Fields(short thriftId, String fieldName) { ++ _thriftId = thriftId; ++ _fieldName = fieldName; ++ } ++ ++ public short getThriftFieldId() { ++ return _thriftId; ++ } ++ ++ public String getFieldName() { ++ return _fieldName; ++ } ++ } ++ ++ // isset id assignments ++ private static final int __MORE_ISSET_ID = 0; ++ private byte __isset_bitfield = 0; ++ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; ++ static { ++ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); ++ tmpMap.put(_Fields.RESULTS, new org.apache.thrift.meta_data.FieldMetaData("results", org.apache.thrift.TFieldRequirementType.DEFAULT, ++ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, ++ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, PKeyValue.class)))); ++ tmpMap.put(_Fields.MORE, new org.apache.thrift.meta_data.FieldMetaData("more", org.apache.thrift.TFieldRequirementType.DEFAULT, ++ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); ++ metaDataMap = Collections.unmodifiableMap(tmpMap); ++ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(PScanResult.class, metaDataMap); ++ } ++ ++ public PScanResult() { ++ } ++ ++ public PScanResult( ++ List<PKeyValue> results, ++ boolean more) ++ { ++ this(); ++ this.results = results; ++ this.more = more; ++ setMoreIsSet(true); ++ } ++ ++ /** ++ * Performs a deep copy on <i>other</i>. ++ */ ++ public PScanResult(PScanResult other) { ++ __isset_bitfield = other.__isset_bitfield; ++ if (other.isSetResults()) { ++ List<PKeyValue> __this__results = new ArrayList<PKeyValue>(); ++ for (PKeyValue other_element : other.results) { ++ __this__results.add(new PKeyValue(other_element)); ++ } ++ this.results = __this__results; ++ } ++ this.more = other.more; ++ } ++ ++ public PScanResult deepCopy() { ++ return new PScanResult(this); ++ } ++ ++ @Override ++ public void clear() { ++ this.results = null; ++ setMoreIsSet(false); ++ this.more = false; ++ } ++ ++ public int getResultsSize() { ++ return (this.results == null) ? 
0 : this.results.size(); ++ } ++ ++ public java.util.Iterator<PKeyValue> getResultsIterator() { ++ return (this.results == null) ? null : this.results.iterator(); ++ } ++ ++ public void addToResults(PKeyValue elem) { ++ if (this.results == null) { ++ this.results = new ArrayList<PKeyValue>(); ++ } ++ this.results.add(elem); ++ } ++ ++ public List<PKeyValue> getResults() { ++ return this.results; ++ } ++ ++ public PScanResult setResults(List<PKeyValue> results) { ++ this.results = results; ++ return this; ++ } ++ ++ public void unsetResults() { ++ this.results = null; ++ } ++ ++ /** Returns true if field results is set (has been assigned a value) and false otherwise */ ++ public boolean isSetResults() { ++ return this.results != null; ++ } ++ ++ public void setResultsIsSet(boolean value) { ++ if (!value) { ++ this.results = null; ++ } ++ } ++ ++ public boolean isMore() { ++ return this.more; ++ } ++ ++ public PScanResult setMore(boolean more) { ++ this.more = more; ++ setMoreIsSet(true); ++ return this; ++ } ++ ++ public void unsetMore() { ++ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __MORE_ISSET_ID); ++ } ++ ++ /** Returns true if field more is set (has been assigned a value) and false otherwise */ ++ public boolean isSetMore() { ++ return EncodingUtils.testBit(__isset_bitfield, __MORE_ISSET_ID); ++ } ++ ++ public void setMoreIsSet(boolean value) { ++ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __MORE_ISSET_ID, value); ++ } ++ ++ public void setFieldValue(_Fields field, Object value) { ++ switch (field) { ++ case RESULTS: ++ if (value == null) { ++ unsetResults(); ++ } else { ++ setResults((List<PKeyValue>)value); ++ } ++ break; ++ ++ case MORE: ++ if (value == null) { ++ unsetMore(); ++ } else { ++ setMore((Boolean)value); ++ } ++ break; ++ ++ } ++ } ++ ++ public Object getFieldValue(_Fields field) { ++ switch (field) { ++ case RESULTS: ++ return getResults(); ++ ++ case MORE: ++ return Boolean.valueOf(isMore()); ++ ++ } ++ throw new IllegalStateException(); ++ } ++ ++ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ ++ public boolean isSet(_Fields field) { ++ if (field == null) { ++ throw new IllegalArgumentException(); ++ } ++ ++ switch (field) { ++ case RESULTS: ++ return isSetResults(); ++ case MORE: ++ return isSetMore(); ++ } ++ throw new IllegalStateException(); ++ } ++ ++ @Override ++ public boolean equals(Object that) { ++ if (that == null) ++ return false; ++ if (that instanceof PScanResult) ++ return this.equals((PScanResult)that); ++ return false; ++ } ++ ++ public boolean equals(PScanResult that) { ++ if (that == null) ++ return false; ++ ++ boolean this_present_results = true && this.isSetResults(); ++ boolean that_present_results = true && that.isSetResults(); ++ if (this_present_results || that_present_results) { ++ if (!(this_present_results && that_present_results)) ++ return false; ++ if (!this.results.equals(that.results)) ++ return false; ++ } ++ ++ boolean this_present_more = true; ++ boolean that_present_more = true; ++ if (this_present_more || that_present_more) { ++ if (!(this_present_more && that_present_more)) ++ return false; ++ if (this.more != that.more) ++ return false; ++ } ++ ++ return true; ++ } ++ ++ @Override ++ public int hashCode() { ++ return 0; ++ } ++ ++ public int compareTo(PScanResult other) { ++ if (!getClass().equals(other.getClass())) { ++ return getClass().getName().compareTo(other.getClass().getName()); ++ } ++ ++ int lastComparison = 0; ++ 
PScanResult typedOther = (PScanResult)other; ++ ++ lastComparison = Boolean.valueOf(isSetResults()).compareTo(typedOther.isSetResults()); ++ if (lastComparison != 0) { ++ return lastComparison; ++ } ++ if (isSetResults()) { ++ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.results, typedOther.results); ++ if (lastComparison != 0) { ++ return lastComparison; ++ } ++ } ++ lastComparison = Boolean.valueOf(isSetMore()).compareTo(typedOther.isSetMore()); ++ if (lastComparison != 0) { ++ return lastComparison; ++ } ++ if (isSetMore()) { ++ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.more, typedOther.more); ++ if (lastComparison != 0) { ++ return lastComparison; ++ } ++ } ++ return 0; ++ } ++ ++ public _Fields fieldForId(int fieldId) { ++ return _Fields.findByThriftId(fieldId); ++ } ++ ++ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { ++ schemes.get(iprot.getScheme()).getScheme().read(iprot, this); ++ } ++ ++ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { ++ schemes.get(oprot.getScheme()).getScheme().write(oprot, this); ++ } ++ ++ @Override ++ public String toString() { ++ StringBuilder sb = new StringBuilder("PScanResult("); ++ boolean first = true; ++ ++ sb.append("results:"); ++ if (this.results == null) { ++ sb.append("null"); ++ } else { ++ sb.append(this.results); ++ } ++ first = false; ++ if (!first) sb.append(", "); ++ sb.append("more:"); ++ sb.append(this.more); ++ first = false; ++ sb.append(")"); ++ return sb.toString(); ++ } ++ ++ public void validate() throws org.apache.thrift.TException { ++ // check for required fields ++ // check for sub-struct validity ++ } ++ ++ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { ++ try { ++ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); ++ } catch (org.apache.thrift.TException te) { ++ throw new java.io.IOException(te); ++ } ++ } ++ ++ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { ++ try { ++ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
++ __isset_bitfield = 0; ++ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); ++ } catch (org.apache.thrift.TException te) { ++ throw new java.io.IOException(te); ++ } ++ } ++ ++ private static class PScanResultStandardSchemeFactory implements SchemeFactory { ++ public PScanResultStandardScheme getScheme() { ++ return new PScanResultStandardScheme(); ++ } ++ } ++ ++ private static class PScanResultStandardScheme extends StandardScheme<PScanResult> { ++ ++ public void read(org.apache.thrift.protocol.TProtocol iprot, PScanResult struct) throws org.apache.thrift.TException { ++ org.apache.thrift.protocol.TField schemeField; ++ iprot.readStructBegin(); ++ while (true) ++ { ++ schemeField = iprot.readFieldBegin(); ++ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { ++ break; ++ } ++ switch (schemeField.id) { ++ case 1: // RESULTS ++ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { ++ { ++ org.apache.thrift.protocol.TList _list0 = iprot.readListBegin(); ++ struct.results = new ArrayList<PKeyValue>(_list0.size); ++ for (int _i1 = 0; _i1 < _list0.size; ++_i1) ++ { ++ PKeyValue _elem2; // required ++ _elem2 = new PKeyValue(); ++ _elem2.read(iprot); ++ struct.results.add(_elem2); ++ } ++ iprot.readListEnd(); ++ } ++ struct.setResultsIsSet(true); ++ } else { ++ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); ++ } ++ break; ++ case 2: // MORE ++ if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { ++ struct.more = iprot.readBool(); ++ struct.setMoreIsSet(true); ++ } else { ++ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); ++ } ++ break; ++ default: ++ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); ++ } ++ iprot.readFieldEnd(); ++ } ++ iprot.readStructEnd(); ++ ++ // check for required fields of primitive type, which can't be checked in the validate method ++ struct.validate(); ++ } ++ ++ public void write(org.apache.thrift.protocol.TProtocol oprot, PScanResult struct) throws org.apache.thrift.TException { ++ struct.validate(); ++ ++ oprot.writeStructBegin(STRUCT_DESC); ++ if (struct.results != null) { ++ oprot.writeFieldBegin(RESULTS_FIELD_DESC); ++ { ++ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.results.size())); ++ for (PKeyValue _iter3 : struct.results) ++ { ++ _iter3.write(oprot); ++ } ++ oprot.writeListEnd(); ++ } ++ oprot.writeFieldEnd(); ++ } ++ oprot.writeFieldBegin(MORE_FIELD_DESC); ++ oprot.writeBool(struct.more); ++ oprot.writeFieldEnd(); ++ oprot.writeFieldStop(); ++ oprot.writeStructEnd(); ++ } ++ ++ } ++ ++ private static class PScanResultTupleSchemeFactory implements SchemeFactory { ++ public PScanResultTupleScheme getScheme() { ++ return new PScanResultTupleScheme(); ++ } ++ } ++ ++ private static class PScanResultTupleScheme extends TupleScheme<PScanResult> { ++ ++ @Override ++ public void write(org.apache.thrift.protocol.TProtocol prot, PScanResult struct) throws org.apache.thrift.TException { ++ TTupleProtocol oprot = (TTupleProtocol) prot; ++ BitSet optionals = new BitSet(); ++ if (struct.isSetResults()) { ++ optionals.set(0); ++ } ++ if (struct.isSetMore()) { ++ optionals.set(1); ++ } ++ oprot.writeBitSet(optionals, 2); ++ if (struct.isSetResults()) { ++ { ++ oprot.writeI32(struct.results.size()); ++ for (PKeyValue _iter4 : struct.results) ++ { ++ _iter4.write(oprot); ++ } ++ } ++ } ++ if (struct.isSetMore()) { ++ oprot.writeBool(struct.more); 
++ } ++ } ++ ++ @Override ++ public void read(org.apache.thrift.protocol.TProtocol prot, PScanResult struct) throws org.apache.thrift.TException { ++ TTupleProtocol iprot = (TTupleProtocol) prot; ++ BitSet incoming = iprot.readBitSet(2); ++ if (incoming.get(0)) { ++ { ++ org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); ++ struct.results = new ArrayList<PKeyValue>(_list5.size); ++ for (int _i6 = 0; _i6 < _list5.size; ++_i6) ++ { ++ PKeyValue _elem7; // required ++ _elem7 = new PKeyValue(); ++ _elem7.read(iprot); ++ struct.results.add(_elem7); ++ } ++ } ++ struct.setResultsIsSet(true); ++ } ++ if (incoming.get(1)) { ++ struct.more = iprot.readBool(); ++ struct.setMoreIsSet(true); ++ } ++ } ++ } ++ ++} ++
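[Editorial note, not part of the commit] PScanResult, generated above, is the page type the proxy returns from scanner_next_k: a batch of PKeyValue results plus a "more" flag indicating whether another page is available. A minimal sketch of the consuming loop, assuming a connected proxy client handle (called "client" here, standing in for what the tests obtain via tpc.proxy()) and placeholder userpass/table arguments; the same pattern appears verbatim in the TestProxyReadWrite tests later in this commit:

    // Drain a proxy batch scanner page by page (illustrative sketch only).
    // createBatchScanner(userpass, table, auths, iteratorSetting, ranges) returns a cookie
    // that identifies the server-side scanner for subsequent scanner_next_k calls.
    String cookie = client.createBatchScanner(userpass, "mytable", null, null, null);
    boolean more = true;
    int total = 0;
    while (more) {
      PScanResult page = client.scanner_next_k(cookie, 1000); // up to 1000 key/value pairs per call
      for (PKeyValue kv : page.getResults()) {
        total++;                                              // consume each cell here
      }
      more = page.isMore();                                    // false once the scan is exhausted
    }

The proxy tests below rely on exactly this isMore()/getResults() contract to count the rows returned by their range, filter, and visibility scans.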
http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PSystemPermission.java ---------------------------------------------------------------------- diff --cc proxy/src/main/java/org/apache/accumulo/proxy/thrift/PSystemPermission.java index 0000000,0000000..9dfdbb4 new file mode 100644 --- /dev/null +++ b/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PSystemPermission.java @@@ -1,0 -1,0 +1,79 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++/** ++ * Autogenerated by Thrift Compiler (0.9.0) ++ * ++ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING ++ * @generated ++ */ ++package org.apache.accumulo.proxy.thrift; ++ ++ ++import java.util.Map; ++import java.util.HashMap; ++import org.apache.thrift.TEnum; ++ ++@SuppressWarnings("all") public enum PSystemPermission implements org.apache.thrift.TEnum { ++ GRANT(0), ++ CREATE_TABLE(1), ++ DROP_TABLE(2), ++ ALTER_TABLE(3), ++ CREATE_USER(4), ++ DROP_USER(5), ++ ALTER_USER(6), ++ SYSTEM(7); ++ ++ private final int value; ++ ++ private PSystemPermission(int value) { ++ this.value = value; ++ } ++ ++ /** ++ * Get the integer value of this enum value, as defined in the Thrift IDL. ++ */ ++ public int getValue() { ++ return value; ++ } ++ ++ /** ++ * Find a the enum type by its integer value, as defined in the Thrift IDL. ++ * @return null if the value is not found. ++ */ ++ public static PSystemPermission findByValue(int value) { ++ switch (value) { ++ case 0: ++ return GRANT; ++ case 1: ++ return CREATE_TABLE; ++ case 2: ++ return DROP_TABLE; ++ case 3: ++ return ALTER_TABLE; ++ case 4: ++ return CREATE_USER; ++ case 5: ++ return DROP_USER; ++ case 6: ++ return ALTER_USER; ++ case 7: ++ return SYSTEM; ++ default: ++ return null; ++ } ++ } ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PTablePermission.java ---------------------------------------------------------------------- diff --cc proxy/src/main/java/org/apache/accumulo/proxy/thrift/PTablePermission.java index 0000000,0000000..1e57c9f new file mode 100644 --- /dev/null +++ b/proxy/src/main/java/org/apache/accumulo/proxy/thrift/PTablePermission.java @@@ -1,0 -1,0 +1,73 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. 
You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. ++ */ ++/** ++ * Autogenerated by Thrift Compiler (0.9.0) ++ * ++ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING ++ * @generated ++ */ ++package org.apache.accumulo.proxy.thrift; ++ ++ ++import java.util.Map; ++import java.util.HashMap; ++import org.apache.thrift.TEnum; ++ ++@SuppressWarnings("all") public enum PTablePermission implements org.apache.thrift.TEnum { ++ READ(2), ++ WRITE(3), ++ BULK_IMPORT(4), ++ ALTER_TABLE(5), ++ GRANT(6), ++ DROP_TABLE(7); ++ ++ private final int value; ++ ++ private PTablePermission(int value) { ++ this.value = value; ++ } ++ ++ /** ++ * Get the integer value of this enum value, as defined in the Thrift IDL. ++ */ ++ public int getValue() { ++ return value; ++ } ++ ++ /** ++ * Find a the enum type by its integer value, as defined in the Thrift IDL. ++ * @return null if the value is not found. ++ */ ++ public static PTablePermission findByValue(int value) { ++ switch (value) { ++ case 2: ++ return READ; ++ case 3: ++ return WRITE; ++ case 4: ++ return BULK_IMPORT; ++ case 5: ++ return ALTER_TABLE; ++ case 6: ++ return GRANT; ++ case 7: ++ return DROP_TABLE; ++ default: ++ return null; ++ } ++ } ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/test/java/org/apache/accumulo/TestProxyInstanceOperations.java ---------------------------------------------------------------------- diff --cc proxy/src/test/java/org/apache/accumulo/TestProxyInstanceOperations.java index 0000000,0000000..a746ad6 new file mode 100644 --- /dev/null +++ b/proxy/src/test/java/org/apache/accumulo/TestProxyInstanceOperations.java @@@ -1,0 -1,0 +1,82 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package org.apache.accumulo; ++ ++import static org.junit.Assert.assertEquals; ++import static org.junit.Assert.assertNull; ++import static org.junit.Assert.assertTrue; ++ ++import java.nio.ByteBuffer; ++import java.util.Properties; ++ ++import org.apache.accumulo.proxy.Proxy; ++import org.apache.accumulo.proxy.TestProxyClient; ++import org.apache.accumulo.proxy.thrift.UserPass; ++import org.apache.thrift.TException; ++import org.apache.thrift.server.TServer; ++import org.junit.AfterClass; ++import org.junit.BeforeClass; ++import org.junit.Test; ++ ++public class TestProxyInstanceOperations { ++ protected static TServer proxy; ++ protected static Thread thread; ++ protected static TestProxyClient tpc; ++ protected static UserPass userpass; ++ protected static final int port = 10197; ++ ++ @BeforeClass ++ public static void setup() throws Exception { ++ Properties prop = new Properties(); ++ prop.setProperty("org.apache.accumulo.proxy.ProxyServer.useMockInstance", "true"); ++ ++ proxy = Proxy.createProxyServer(Class.forName("org.apache.accumulo.proxy.thrift.AccumuloProxy"), ++ Class.forName("org.apache.accumulo.proxy.ProxyServer"), port, prop); ++ thread = new Thread() { ++ @Override ++ public void run() { ++ proxy.serve(); ++ } ++ }; ++ thread.start(); ++ tpc = new TestProxyClient("localhost", port); ++ userpass = new UserPass("root", ByteBuffer.wrap("".getBytes())); ++ } ++ ++ @AfterClass ++ public static void tearDown() throws InterruptedException { ++ proxy.stop(); ++ thread.join(); ++ } ++ ++ @Test ++ public void properties() throws TException { ++ tpc.proxy().instanceOperations_setProperty(userpass, "test.systemprop", "whistletips"); ++ ++ assertEquals(tpc.proxy().instanceOperations_getSystemConfiguration(userpass).get("test.systemprop"), "whistletips"); ++ tpc.proxy().instanceOperations_removeProperty(userpass, "test.systemprop"); ++ assertNull(tpc.proxy().instanceOperations_getSystemConfiguration(userpass).get("test.systemprop")); ++ ++ } ++ ++ @Test ++ public void testClassLoad() throws TException { ++ assertTrue(tpc.proxy().instanceOperations_testClassLoad(userpass, "org.apache.accumulo.core.iterators.user.RegExFilter", ++ "org.apache.accumulo.core.iterators.Filter")); ++ } ++ ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/test/java/org/apache/accumulo/TestProxyReadWrite.java ---------------------------------------------------------------------- diff --cc proxy/src/test/java/org/apache/accumulo/TestProxyReadWrite.java index 0000000,0000000..0b55261 new file mode 100644 --- /dev/null +++ b/proxy/src/test/java/org/apache/accumulo/TestProxyReadWrite.java @@@ -1,0 -1,0 +1,388 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package org.apache.accumulo; ++ ++import static org.junit.Assert.assertEquals; ++ ++import java.nio.ByteBuffer; ++import java.util.ArrayList; ++import java.util.Collections; ++import java.util.HashMap; ++import java.util.HashSet; ++import java.util.List; ++import java.util.Map; ++import java.util.Properties; ++import java.util.Set; ++ ++import org.apache.accumulo.core.client.IteratorSetting; ++import org.apache.accumulo.core.iterators.user.RegExFilter; ++import org.apache.accumulo.proxy.Proxy; ++import org.apache.accumulo.proxy.TestProxyClient; ++import org.apache.accumulo.proxy.Util; ++import org.apache.accumulo.proxy.thrift.PColumnUpdate; ++import org.apache.accumulo.proxy.thrift.PIteratorSetting; ++import org.apache.accumulo.proxy.thrift.PKey; ++import org.apache.accumulo.proxy.thrift.PKeyValue; ++import org.apache.accumulo.proxy.thrift.PRange; ++import org.apache.accumulo.proxy.thrift.PScanResult; ++import org.apache.accumulo.proxy.thrift.UserPass; ++import org.apache.thrift.server.TServer; ++import org.junit.After; ++import org.junit.AfterClass; ++import org.junit.Before; ++import org.junit.BeforeClass; ++import org.junit.Test; ++ ++public class TestProxyReadWrite { ++ protected static TServer proxy; ++ protected static Thread thread; ++ protected static TestProxyClient tpc; ++ protected static UserPass userpass; ++ protected static final int port = 10194; ++ protected static final String testtable = "testtable"; ++ ++ @BeforeClass ++ public static void setup() throws Exception { ++ Properties prop = new Properties(); ++ prop.setProperty("org.apache.accumulo.proxy.ProxyServer.useMockInstance", "true"); ++ ++ proxy = Proxy.createProxyServer(Class.forName("org.apache.accumulo.proxy.thrift.AccumuloProxy"), ++ Class.forName("org.apache.accumulo.proxy.ProxyServer"), port, prop); ++ thread = new Thread() { ++ @Override ++ public void run() { ++ proxy.serve(); ++ } ++ }; ++ thread.start(); ++ tpc = new TestProxyClient("localhost", port); ++ userpass = new UserPass("root", ByteBuffer.wrap("".getBytes())); ++ } ++ ++ @AfterClass ++ public static void tearDown() throws InterruptedException { ++ proxy.stop(); ++ thread.join(); ++ } ++ ++ @Before ++ public void makeTestTable() throws Exception { ++ tpc.proxy().tableOperations_create(userpass, testtable); ++ } ++ ++ @After ++ public void deleteTestTable() throws Exception { ++ tpc.proxy().tableOperations_delete(userpass, testtable); ++ } ++ ++ private static void addMutation(Map<ByteBuffer,List<PColumnUpdate>> mutations, String row, String cf, String cq, String value) { ++ PColumnUpdate update = new PColumnUpdate(ByteBuffer.wrap(cf.getBytes()), ByteBuffer.wrap(cq.getBytes()), ByteBuffer.wrap(value.getBytes())); ++ mutations.put(ByteBuffer.wrap(row.getBytes()), Collections.singletonList(update)); ++ } ++ ++ private static void addMutation(Map<ByteBuffer,List<PColumnUpdate>> mutations, String row, String cf, String cq, String vis, String value) { ++ PColumnUpdate update = new PColumnUpdate(ByteBuffer.wrap(cf.getBytes()), ByteBuffer.wrap(cq.getBytes()), ByteBuffer.wrap(value.getBytes())); ++ update.setColVisibility(vis.getBytes()); ++ mutations.put(ByteBuffer.wrap(row.getBytes()), Collections.singletonList(update)); ++ } ++ ++ /** ++ * Insert 100000 cells which have as the row [0..99999] (padded with zeros). 
Set a range so only the entries between -Inf...5 come back (there should be ++ * 50,000) ++ * ++ * @throws Exception ++ */ ++ @Test ++ public void readWriteBatchOneShotWithRange() throws Exception { ++ int maxInserts = 100000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ tpc.proxy().updateAndFlush(userpass, testtable, mutations, null); ++ mutations.clear(); ++ } ++ } ++ ++ PKey stop = new PKey(); ++ stop.setRow("5".getBytes()); ++ List<PRange> pranges = new ArrayList<PRange>(); ++ pranges.add(new PRange(null, stop)); ++ String cookie = tpc.proxy().createBatchScanner(userpass, testtable, null, null, pranges); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ i += kvList.getResultsSize(); ++ hasNext = kvList.isMore(); ++ } ++ assertEquals(i, 50000); ++ } ++ ++ /** ++ * Insert 100000 cells which have as the row [0..99999] (padded with zeros). Filter the results so only the even numbers come back. ++ * ++ * @throws Exception ++ */ ++ @Test ++ public void readWriteBatchOneShotWithFilterIterator() throws Exception { ++ int maxInserts = 10000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ tpc.proxy().updateAndFlush(userpass, testtable, mutations, null); ++ mutations.clear(); ++ } ++ ++ } ++ ++ String regex = ".*[02468]"; ++ ++ IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class); ++ RegExFilter.setRegexs(is, regex, null, null, null, false); ++ ++ PIteratorSetting pis = Util.iteratorSetting2ProxyIteratorSetting(is); ++ String cookie = tpc.proxy().createBatchScanner(userpass, testtable, null, pis, null); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ for (PKeyValue kv : kvList.getResults()) { ++ assertEquals(Integer.parseInt(new String(kv.getKey().getRow())), i); ++ ++ i += 2; ++ } ++ hasNext = kvList.isMore(); ++ } ++ } ++ ++ @Test ++ public void readWriteOneShotWithRange() throws Exception { ++ int maxInserts = 100000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ tpc.proxy().updateAndFlush(userpass, testtable, mutations, null); ++ mutations.clear(); ++ } ++ } ++ ++ PKey stop = new PKey(); ++ stop.setRow("5".getBytes()); ++ String cookie = tpc.proxy().createScanner(userpass, testtable, null, null, new PRange(null, stop)); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ i += kvList.getResultsSize(); ++ hasNext = kvList.isMore(); ++ } ++ assertEquals(i, 50000); ++ } ++ ++ /** ++ * Insert 100000 cells which have as the row [0..99999] (padded with zeros). 
Filter the results so only the even numbers come back. ++ * ++ * @throws Exception ++ */ ++ @Test ++ public void readWriteOneShotWithFilterIterator() throws Exception { ++ int maxInserts = 10000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ ++ tpc.proxy().updateAndFlush(userpass, testtable, mutations, null); ++ mutations.clear(); ++ ++ } ++ ++ } ++ ++ String regex = ".*[02468]"; ++ ++ IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class); ++ RegExFilter.setRegexs(is, regex, null, null, null, false); ++ ++ PIteratorSetting pis = Util.iteratorSetting2ProxyIteratorSetting(is); ++ String cookie = tpc.proxy().createScanner(userpass, testtable, null, pis, null); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ for (PKeyValue kv : kvList.getResults()) { ++ assertEquals(Integer.parseInt(new String(kv.getKey().getRow())), i); ++ ++ i += 2; ++ } ++ hasNext = kvList.isMore(); ++ } ++ } ++ ++ // @Test ++ // This test takes kind of a long time. Enable it if you think you may have memory issues. ++ public void manyWritesAndReads() throws Exception { ++ int maxInserts = 1000000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$06d"; ++ String writer = tpc.proxy().createWriter(userpass, testtable); ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ ++ tpc.proxy().writer_update(writer, mutations, null); ++ mutations.clear(); ++ ++ } ++ ++ } ++ ++ tpc.proxy().writer_flush(writer); ++ tpc.proxy().writer_close(writer); ++ ++ String cookie = tpc.proxy().createBatchScanner(userpass, testtable, null, null, null); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ for (PKeyValue kv : kvList.getResults()) { ++ assertEquals(Integer.parseInt(new String(kv.getKey().getRow())), i); ++ i++; ++ } ++ hasNext = kvList.isMore(); ++ if (hasNext) ++ assertEquals(k, kvList.getResults().size()); ++ } ++ assertEquals(maxInserts, i); ++ } ++ ++ @Test ++ public void asynchReadWrite() throws Exception { ++ int maxInserts = 10000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ String writer = tpc.proxy().createWriter(userpass, testtable); ++ for (int i = 0; i < maxInserts; i++) { ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ tpc.proxy().writer_update(writer, mutations, null); ++ mutations.clear(); ++ } ++ } ++ ++ tpc.proxy().writer_flush(writer); ++ tpc.proxy().writer_close(writer); ++ ++ String regex = ".*[02468]"; ++ ++ IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class); ++ RegExFilter.setRegexs(is, regex, null, null, null, false); ++ ++ PIteratorSetting pis = Util.iteratorSetting2ProxyIteratorSetting(is); ++ String cookie = tpc.proxy().createBatchScanner(userpass, testtable, null, pis, null); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ 
int k = 1000; ++ int numRead = 0; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ for (PKeyValue kv : kvList.getResults()) { ++ assertEquals(i, Integer.parseInt(new String(kv.getKey().getRow()))); ++ numRead++; ++ i += 2; ++ } ++ hasNext = kvList.isMore(); ++ } ++ assertEquals(maxInserts / 2, numRead); ++ } ++ ++ @Test ++ public void testVisibility() throws Exception { ++ ++ Set<String> auths = new HashSet<String>(); ++ auths.add("even"); ++ tpc.proxy().securityOperations_changeUserAuthorizations(userpass, "root", auths); ++ ++ int maxInserts = 10000; ++ Map<ByteBuffer,List<PColumnUpdate>> mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ String format = "%1$05d"; ++ String writer = tpc.proxy().createWriter(userpass, testtable); ++ for (int i = 0; i < maxInserts; i++) { ++ if (i % 2 == 0) ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, "even", Util.randString(10)); ++ else ++ addMutation(mutations, String.format(format, i), "cf" + i, "cq" + i, "odd", Util.randString(10)); ++ ++ if (i % 1000 == 0 || i == maxInserts - 1) { ++ tpc.proxy().writer_update(writer, mutations, null); ++ mutations.clear(); ++ } ++ } ++ ++ tpc.proxy().writer_flush(writer); ++ tpc.proxy().writer_close(writer); ++ String cookie = tpc.proxy().createBatchScanner(userpass, testtable, auths, null, null); ++ ++ int i = 0; ++ boolean hasNext = true; ++ ++ int k = 1000; ++ int numRead = 0; ++ while (hasNext) { ++ PScanResult kvList = tpc.proxy().scanner_next_k(cookie, k); ++ for (PKeyValue kv : kvList.getResults()) { ++ assertEquals(Integer.parseInt(new String(kv.getKey().getRow())), i); ++ i += 2; ++ numRead++; ++ } ++ hasNext = kvList.isMore(); ++ ++ } ++ assertEquals(maxInserts / 2, numRead); ++ } ++ ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/test/java/org/apache/accumulo/TestProxySecurityOperations.java ---------------------------------------------------------------------- diff --cc proxy/src/test/java/org/apache/accumulo/TestProxySecurityOperations.java index 0000000,0000000..b221285 new file mode 100644 --- /dev/null +++ b/proxy/src/test/java/org/apache/accumulo/TestProxySecurityOperations.java @@@ -1,0 -1,0 +1,142 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package org.apache.accumulo; ++ ++import static org.junit.Assert.assertEquals; ++import static org.junit.Assert.assertFalse; ++import static org.junit.Assert.assertTrue; ++ ++import java.nio.ByteBuffer; ++import java.util.HashSet; ++import java.util.List; ++import java.util.Properties; ++ ++import org.apache.accumulo.proxy.Proxy; ++import org.apache.accumulo.proxy.TestProxyClient; ++import org.apache.accumulo.proxy.thrift.PSystemPermission; ++import org.apache.accumulo.proxy.thrift.PTablePermission; ++import org.apache.accumulo.proxy.thrift.UserPass; ++import org.apache.thrift.TException; ++import org.apache.thrift.server.TServer; ++import org.junit.After; ++import org.junit.AfterClass; ++import org.junit.Before; ++import org.junit.BeforeClass; ++import org.junit.Test; ++ ++public class TestProxySecurityOperations { ++ protected static TServer proxy; ++ protected static Thread thread; ++ protected static TestProxyClient tpc; ++ protected static UserPass userpass; ++ protected static final int port = 10196; ++ protected static final String testtable = "testtable"; ++ protected static final String testuser = "VonJines"; ++ protected static final ByteBuffer testpw = ByteBuffer.wrap("fiveones".getBytes()); ++ ++ @BeforeClass ++ public static void setup() throws Exception { ++ Properties prop = new Properties(); ++ prop.setProperty("org.apache.accumulo.proxy.ProxyServer.useMockInstance", "true"); ++ ++ proxy = Proxy.createProxyServer(Class.forName("org.apache.accumulo.proxy.thrift.AccumuloProxy"), ++ Class.forName("org.apache.accumulo.proxy.ProxyServer"), port, prop); ++ thread = new Thread() { ++ @Override ++ public void run() { ++ proxy.serve(); ++ } ++ }; ++ thread.start(); ++ ++ tpc = new TestProxyClient("localhost", port); ++ userpass = new UserPass("root", ByteBuffer.wrap("".getBytes())); ++ } ++ ++ @AfterClass ++ public static void tearDown() throws InterruptedException { ++ proxy.stop(); ++ thread.join(); ++ } ++ ++ @Before ++ public void makeTestTableAndUser() throws Exception { ++ tpc.proxy().tableOperations_create(userpass, testtable); ++ tpc.proxy().securityOperations_createUser(userpass, testuser, testpw, new HashSet<String>()); ++ } ++ ++ @After ++ public void deleteTestTable() throws Exception { ++ tpc.proxy().tableOperations_delete(userpass, testtable); ++ tpc.proxy().securityOperations_dropUser(userpass, testuser); ++ } ++ ++ @Test ++ public void create() throws TException { ++ tpc.proxy().securityOperations_createUser(userpass, testuser + "2", testpw, new HashSet<String>()); ++ assertTrue(tpc.proxy().securityOperations_listUsers(userpass).contains(testuser + "2")); ++ tpc.proxy().securityOperations_dropUser(userpass, testuser + "2"); ++ assertTrue(!tpc.proxy().securityOperations_listUsers(userpass).contains(testuser + "2")); ++ } ++ ++ @Test ++ public void authenticate() throws TException { ++ assertTrue(tpc.proxy().securityOperations_authenticateUser(userpass, testuser, testpw)); ++ assertFalse(tpc.proxy().securityOperations_authenticateUser(userpass, "EvilUser", testpw)); ++ ++ tpc.proxy().securityOperations_changeUserPassword(userpass, testuser, ByteBuffer.wrap("newpass".getBytes())); ++ assertFalse(tpc.proxy().securityOperations_authenticateUser(userpass, testuser, testpw)); ++ assertTrue(tpc.proxy().securityOperations_authenticateUser(userpass, testuser, ByteBuffer.wrap("newpass".getBytes()))); ++ ++ } ++ ++ @Test ++ public void tablePermissions() throws TException { ++ tpc.proxy().securityOperations_grantTablePermission(userpass, testuser, testtable, 
PTablePermission.ALTER_TABLE); ++ assertTrue(tpc.proxy().securityOperations_hasTablePermission(userpass, testuser, testtable, PTablePermission.ALTER_TABLE)); ++ ++ tpc.proxy().securityOperations_revokeTablePermission(userpass, testuser, testtable, PTablePermission.ALTER_TABLE); ++ assertFalse(tpc.proxy().securityOperations_hasTablePermission(userpass, testuser, testtable, PTablePermission.ALTER_TABLE)); ++ ++ } ++ ++ @Test ++ public void systemPermissions() throws TException { ++ tpc.proxy().securityOperations_grantSystemPermission(userpass, testuser, PSystemPermission.ALTER_USER); ++ assertTrue(tpc.proxy().securityOperations_hasSystemPermission(userpass, testuser, PSystemPermission.ALTER_USER)); ++ ++ tpc.proxy().securityOperations_revokeSystemPermission(userpass, testuser, PSystemPermission.ALTER_USER); ++ assertFalse(tpc.proxy().securityOperations_hasSystemPermission(userpass, testuser, PSystemPermission.ALTER_USER)); ++ ++ } ++ ++ @Test ++ public void auths() throws TException { ++ HashSet<String> newauths = new HashSet<String>(); ++ newauths.add("BBR"); ++ newauths.add("Barney"); ++ tpc.proxy().securityOperations_changeUserAuthorizations(userpass, testuser, newauths); ++ List<ByteBuffer> actualauths = tpc.proxy().securityOperations_getUserAuthorizations(userpass, testuser); ++ assertEquals(actualauths.size(), newauths.size()); ++ ++ for (ByteBuffer auth : actualauths) { ++ System.out.println(auth); ++ assertTrue(newauths.contains(new String(auth.array()))); ++ } ++ } ++ ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/proxy/src/test/java/org/apache/accumulo/TestProxyTableOperations.java ---------------------------------------------------------------------- diff --cc proxy/src/test/java/org/apache/accumulo/TestProxyTableOperations.java index 0000000,0000000..8904b06 new file mode 100644 --- /dev/null +++ b/proxy/src/test/java/org/apache/accumulo/TestProxyTableOperations.java @@@ -1,0 -1,0 +1,219 @@@ ++/* ++ * Licensed to the Apache Software Foundation (ASF) under one or more ++ * contributor license agreements. See the NOTICE file distributed with ++ * this work for additional information regarding copyright ownership. ++ * The ASF licenses this file to You under the Apache License, Version 2.0 ++ * (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * ++ * http://www.apache.org/licenses/LICENSE-2.0 ++ * ++ * Unless required by applicable law or agreed to in writing, software ++ * distributed under the License is distributed on an "AS IS" BASIS, ++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++ * See the License for the specific language governing permissions and ++ * limitations under the License. 
++ */ ++package org.apache.accumulo; ++ ++import static org.junit.Assert.assertEquals; ++import static org.junit.Assert.assertFalse; ++import static org.junit.Assert.assertNull; ++import static org.junit.Assert.assertTrue; ++ ++import java.nio.ByteBuffer; ++import java.util.Collections; ++import java.util.HashMap; ++import java.util.HashSet; ++import java.util.List; ++import java.util.Map; ++import java.util.Properties; ++import java.util.Set; ++ ++import org.apache.accumulo.proxy.Proxy; ++import org.apache.accumulo.proxy.TestProxyClient; ++import org.apache.accumulo.proxy.thrift.PColumnUpdate; ++import org.apache.accumulo.proxy.thrift.UserPass; ++import org.apache.thrift.TException; ++import org.apache.thrift.server.TServer; ++import org.junit.After; ++import org.junit.AfterClass; ++import org.junit.Before; ++import org.junit.BeforeClass; ++import org.junit.Test; ++ ++public class TestProxyTableOperations { ++ ++ protected static TServer proxy; ++ protected static Thread thread; ++ protected static TestProxyClient tpc; ++ protected static UserPass userpass; ++ protected static final int port = 10195; ++ protected static final String testtable = "testtable"; ++ ++ @BeforeClass ++ public static void setup() throws Exception { ++ Properties prop = new Properties(); ++ prop.setProperty("org.apache.accumulo.proxy.ProxyServer.useMockInstance", "true"); ++ ++ proxy = Proxy.createProxyServer(Class.forName("org.apache.accumulo.proxy.thrift.AccumuloProxy"), ++ Class.forName("org.apache.accumulo.proxy.ProxyServer"), port, prop); ++ thread = new Thread() { ++ @Override ++ public void run() { ++ proxy.serve(); ++ } ++ }; ++ thread.start(); ++ tpc = new TestProxyClient("localhost", port); ++ userpass = new UserPass("root", ByteBuffer.wrap("".getBytes())); ++ } ++ ++ @AfterClass ++ public static void tearDown() throws InterruptedException { ++ proxy.stop(); ++ thread.join(); ++ } ++ ++ @Before ++ public void makeTestTable() throws Exception { ++ tpc.proxy().tableOperations_create(userpass, testtable); ++ } ++ ++ @After ++ public void deleteTestTable() throws Exception { ++ tpc.proxy().tableOperations_delete(userpass, testtable); ++ } ++ ++ @Test ++ public void ping() throws Exception { ++ tpc.proxy().ping(userpass); ++ } ++ ++ @Test ++ public void createExistsDelete() throws TException { ++ assertFalse(tpc.proxy().tableOperations_exists(userpass, "testtable2")); ++ tpc.proxy().tableOperations_create(userpass, "testtable2"); ++ assertTrue(tpc.proxy().tableOperations_exists(userpass, "testtable2")); ++ tpc.proxy().tableOperations_delete(userpass, "testtable2"); ++ assertFalse(tpc.proxy().tableOperations_exists(userpass, "testtable2")); ++ } ++ ++ @Test ++ public void listRename() throws TException { ++ assertFalse(tpc.proxy().tableOperations_exists(userpass, "testtable2")); ++ tpc.proxy().tableOperations_rename(userpass, testtable, "testtable2"); ++ assertTrue(tpc.proxy().tableOperations_exists(userpass, "testtable2")); ++ tpc.proxy().tableOperations_rename(userpass, "testtable2", testtable); ++ assertTrue(tpc.proxy().tableOperations_list(userpass).contains("testtable")); ++ ++ } ++ ++ // This test does not yet function because the backing Mock instance does not yet support merging ++ // TODO: add back in as a test when Mock is improved ++ // @Test ++ public void merge() throws TException { ++ Set<String> splits = new HashSet<String>(); ++ splits.add("a"); ++ splits.add("c"); ++ splits.add("z"); ++ tpc.proxy().tableOperations_addSplits(userpass, testtable, splits); ++ ++ 
tpc.proxy().tableOperations_merge(userpass, testtable, "b", "d"); ++ ++ splits.remove("c"); ++ ++ List<String> tableSplits = tpc.proxy().tableOperations_getSplits(userpass, testtable, 10); ++ ++ for (String split : tableSplits) ++ assertTrue(splits.contains(split)); ++ assertTrue(tableSplits.size() == splits.size()); ++ ++ } ++ ++ @Test ++ public void splits() throws TException { ++ Set<String> splits = new HashSet<String>(); ++ splits.add("a"); ++ splits.add("b"); ++ splits.add("z"); ++ tpc.proxy().tableOperations_addSplits(userpass, testtable, splits); ++ ++ List<String> tableSplits = tpc.proxy().tableOperations_getSplits(userpass, testtable, 10); ++ ++ for (String split : tableSplits) ++ assertTrue(splits.contains(split)); ++ assertTrue(tableSplits.size() == splits.size()); ++ } ++ ++ @Test ++ public void constraints() throws TException { ++ int cid = tpc.proxy().tableOperations_addConstraint(userpass, testtable, "org.apache.accumulo.TestConstraint"); ++ Map<String,Integer> constraints = tpc.proxy().tableOperations_listConstraints(userpass, testtable); ++ assertEquals((int) constraints.get("org.apache.accumulo.TestConstraint"), cid); ++ tpc.proxy().tableOperations_removeConstraint(userpass, testtable, cid); ++ constraints = tpc.proxy().tableOperations_listConstraints(userpass, testtable); ++ assertNull(constraints.get("org.apache.accumulo.TestConstraint")); ++ } ++ ++ // This test does not yet function because the backing Mock instance does not yet support locality groups ++ // TODO: add back in as a test when Mock is improved ++ // @Test ++ public void localityGroups() throws TException { ++ Map<String,Set<String>> groups = new HashMap<String,Set<String>>(); ++ Set<String> group1 = new HashSet<String>(); ++ group1.add("cf1"); ++ groups.put("group1", group1); ++ Set<String> group2 = new HashSet<String>(); ++ group2.add("cf2"); ++ group2.add("cf3"); ++ groups.put("group2", group2); ++ tpc.proxy().tableOperations_setLocalityGroups(userpass, testtable, groups); ++ ++ Map<String,Set<String>> actualGroups = tpc.proxy().tableOperations_getLocalityGroups(userpass, testtable); ++ ++ assertEquals(groups.size(), actualGroups.size()); ++ for (String groupName : groups.keySet()) { ++ assertTrue(actualGroups.containsKey(groupName)); ++ assertEquals(groups.get(groupName).size(), actualGroups.get(groupName).size()); ++ for (String cf : groups.get(groupName)) { ++ assertTrue(actualGroups.get(groupName).contains(cf)); ++ } ++ } ++ } ++ ++ @Test ++ public void tableProperties() throws TException { ++ tpc.proxy().tableOperations_setProperty(userpass, testtable, "test.property1", "wharrrgarbl"); ++ assertEquals(tpc.proxy().tableOperations_getProperties(userpass, testtable).get("test.property1"), "wharrrgarbl"); ++ tpc.proxy().tableOperations_removeProperty(userpass, testtable, "test.property1"); ++ assertNull(tpc.proxy().tableOperations_getProperties(userpass, testtable).get("test.property1")); ++ } ++ ++ private static void addMutation(Map<ByteBuffer,List<PColumnUpdate>> mutations, String row, String cf, String cq, String value) { ++ PColumnUpdate update = new PColumnUpdate(ByteBuffer.wrap(cf.getBytes()), ByteBuffer.wrap(cq.getBytes()), ByteBuffer.wrap(value.getBytes())); ++ mutations.put(ByteBuffer.wrap(row.getBytes()), Collections.singletonList(update)); ++ } ++ ++ @Test ++ public void tableOperationsRowMethods() throws TException { ++ List<ByteBuffer> auths = tpc.proxy().securityOperations_getUserAuthorizations(userpass, "root"); ++ // System.out.println(auths); ++ Map<ByteBuffer,List<PColumnUpdate>> 
mutations = new HashMap<ByteBuffer,List<PColumnUpdate>>(); ++ for (int i = 0; i < 10; i++) { ++ addMutation(mutations, "" + i, "cf", "cq", ""); ++ } ++ tpc.proxy().updateAndFlush(userpass, testtable, mutations, null); ++ ++ assertEquals(tpc.proxy().tableOperations_getMaxRow(userpass, testtable, auths, null, true, null, true), "9"); ++ ++ // TODO: Uncomment when the Mock isn't broken ++ // tpc.proxy().tableOperations_deleteRows(userpass,testtable,"51","99"); ++ // assertEquals(tpc.proxy().tableOperations_getMaxRow(userpass, testtable, auths, null, true, null, true),"5"); ++ ++ } ++ ++ /* ++ * @Test(expected = TException.class) public void peekTest() { } ++ */ ++} http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/server/pom.xml ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/server/src/main/c++/nativeMap/Makefile ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/start/pom.xml ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/test/pom.xml ---------------------------------------------------------------------- diff --cc test/pom.xml index 0000000,ba189ce..2d309b7 mode 000000,100644..100644 --- a/test/pom.xml +++ b/test/pom.xml @@@ -1,0 -1,112 +1,112 @@@ + <?xml version="1.0" encoding="UTF-8"?> + <!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ --> + <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + + <parent> + <groupId>org.apache.accumulo</groupId> + <artifactId>accumulo</artifactId> - <version>1.5.0-SNAPSHOT</version> ++ <version>ACCUMULO-652-SNAPSHOT</version> + </parent> + + <modelVersion>4.0.0</modelVersion> + <artifactId>accumulo-test</artifactId> + <name>accumulo-test</name> + + <build> + <pluginManagement> + <plugins> + <plugin> + <artifactId>maven-jar-plugin</artifactId> + <configuration> + <outputDirectory>../lib</outputDirectory> + </configuration> + </plugin> + </plugins> + </pluginManagement> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <configuration> + <archive> + <manifestSections> + <manifestSection> + <name>accumulo/test/</name> + <manifestEntries> + <Sealed>true</Sealed> + </manifestEntries> + </manifestSection> + </manifestSections> + </archive> + </configuration> + </plugin> + </plugins> + </build> + + <profiles> + <!-- profile for building against Hadoop 1.0.x + Activate by not specifying hadoop.profile --> + <profile> + <id>hadoop-1.0</id> + <activation> + <property> + <name>!hadoop.profile</name> + </property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-core</artifactId> + </dependency> + </dependencies> + </profile> + <!-- profile for building against Hadoop 2.0.x + Activate using: mvn -Dhadoop.profile=2.0 --> + <profile> + <id>hadoop-2.0</id> + <activation> + <property> + <name>hadoop.profile</name> + <value>2.0</value> + </property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + </dependency> + </dependencies> + </profile> + </profiles> + + <dependencies> + <dependency> + <groupId>org.apache.accumulo</groupId> + <artifactId>accumulo-core</artifactId> + </dependency> + <dependency> + <groupId>org.apache.accumulo</groupId> + <artifactId>accumulo-server</artifactId> + </dependency> + <dependency> + <groupId>org.apache.zookeeper</groupId> + <artifactId>zookeeper</artifactId> + </dependency> + </dependencies> + + </project> http://git-wip-us.apache.org/repos/asf/accumulo/blob/58fcad6e/trace/pom.xml ----------------------------------------------------------------------