http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/HttpUtils.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/HttpUtils.java b/utils/src/main/java/com/cloud/utils/HttpUtils.java new file mode 100644 index 0000000..d2e844a --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/HttpUtils.java @@ -0,0 +1,114 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// + +package com.cloud.utils; + +import org.apache.log4j.Logger; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; +import java.io.IOException; +import java.util.Map; + +public class HttpUtils { + + public static final Logger s_logger = Logger.getLogger(HttpUtils.class); + + public static final String UTF_8 = "UTF-8"; + public static final String RESPONSE_TYPE_JSON = "json"; + public static final String RESPONSE_TYPE_XML = "xml"; + public static final String JSON_CONTENT_TYPE = "application/json; charset=UTF-8"; + public static final String XML_CONTENT_TYPE = "text/xml; charset=UTF-8"; + + public static void addSecurityHeaders(final HttpServletResponse resp) { + if (resp.containsHeader("X-Content-Type-Options")) { + resp.setHeader("X-Content-Type-Options", "nosniff"); + } + else { + resp.addHeader("X-Content-Type-Options", "nosniff"); + } + if (resp.containsHeader("X-XSS-Protection")) { + resp.setHeader("X-XSS-Protection", "1;mode=block"); + } + else { + resp.addHeader("X-XSS-Protection", "1;mode=block"); + } + } + + public static void writeHttpResponse(final HttpServletResponse resp, final String response, + final Integer responseCode, final String responseType, final String jsonContentType) { + try { + if (RESPONSE_TYPE_JSON.equalsIgnoreCase(responseType)) { + if (jsonContentType != null && !jsonContentType.isEmpty()) { + resp.setContentType(jsonContentType); + } else { + resp.setContentType(JSON_CONTENT_TYPE); + } + } else if (RESPONSE_TYPE_XML.equalsIgnoreCase(responseType)){ + resp.setContentType(XML_CONTENT_TYPE); + } + if (responseCode != null) { + resp.setStatus(responseCode); + } + addSecurityHeaders(resp); + resp.getWriter().print(response); + } catch (final IOException ioex) { + if (s_logger.isTraceEnabled()) { + s_logger.trace("Exception writing http response: " + ioex); + } + } catch (final Exception ex) { + if (!(ex instanceof IllegalStateException)) { + 
s_logger.error("Unknown exception writing http response", ex); + } + } + } + + public static String findCookie(final Cookie[] cookies, final String key) { + if (cookies == null || key == null || key.isEmpty()) { + return null; + } + for (Cookie cookie: cookies) { + if (cookie != null && cookie.getName().equals(key)) { + return cookie.getValue(); + } + } + return null; + } + + public static boolean validateSessionKey(final HttpSession session, final Map<String, Object[]> params, final Cookie[] cookies, final String sessionKeyString) { + if (session == null || sessionKeyString == null) { + return false; + } + final String sessionKey = (String) session.getAttribute(sessionKeyString); + final String sessionKeyFromCookie = HttpUtils.findCookie(cookies, sessionKeyString); + String[] sessionKeyFromParams = null; + if (params != null) { + sessionKeyFromParams = (String[]) params.get(sessionKeyString); + } + if ((sessionKey == null) + || (sessionKeyFromParams == null && sessionKeyFromCookie == null) + || (sessionKeyFromParams != null && !sessionKey.equals(sessionKeyFromParams[0])) + || (sessionKeyFromCookie != null && !sessionKey.equals(sessionKeyFromCookie))) { + return false; + } + return true; + } + +}
http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/IteratorUtil.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/IteratorUtil.java b/utils/src/main/java/com/cloud/utils/IteratorUtil.java new file mode 100644 index 0000000..8ab88f5 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/IteratorUtil.java @@ -0,0 +1,72 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
/**
 * Adapters between the legacy {@link java.util.Enumeration} API and the
 * {@link java.util.Iterator}/{@link Iterable} world, plus a sorted-copy helper.
 */
public class IteratorUtil {

    /** Read-only Iterator view over an Enumeration; remove() is unsupported. */
    private static final class EnumerationIterator<T> implements Iterator<T> {
        private final Enumeration<T> source;

        EnumerationIterator(final Enumeration<T> source) {
            this.source = source;
        }

        @Override
        public boolean hasNext() {
            return source.hasMoreElements();
        }

        @Override
        public T next() {
            return source.nextElement();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Wraps an Enumeration so it can be used in a for-each loop.
     * The view is single-pass: it consumes the underlying enumeration.
     */
    public static <T> Iterable<T> enumerationAsIterable(final Enumeration<T> e) {
        return new Iterable<T>() {
            @Override
            public Iterator<T> iterator() {
                return new EnumerationIterator<T>(e);
            }
        };
    }

    /** Presents an Iterator through the legacy Enumeration interface. */
    public static <T> Enumeration<T> iteratorAsEnumeration(final Iterator<T> it) {
        return new Enumeration<T>() {
            @Override
            public boolean hasMoreElements() {
                return it.hasNext();
            }

            @Override
            public T nextElement() {
                return it.next();
            }
        };
    }

    /** Returns a new list containing the collection's elements in natural order. */
    public static <T extends Comparable<? super T>> List<T> asSortedList(Collection<T> c) {
        final List<T> sorted = new ArrayList<T>(c);
        java.util.Collections.sort(sorted);
        return sorted;
    }
}
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// + +package com.cloud.utils; + +import java.util.ArrayList; + +import org.apache.log4j.Level; +import org.apache.log4j.Logger; + +/** + * Journal is used to kept what has happened during a process so someone can track + * what happens during a process. + * + */ +public class Journal { + String _name; + ArrayList<Pair<String, Object[]>> _entries; + + public Journal(String name) { + _name = name; + _entries = new ArrayList<Pair<String, Object[]>>(); + } + + final private void log(String msg, Object... params) { + Pair<String, Object[]> entry = new Pair<String, Object[]>(msg, params); + assert msg != null : "Message can not be null or else it's useless!"; + _entries.add(entry); + } + + public void record(String msg, Object... params) { + log(msg, params); + } + + public void record(Logger logger, Level p, String msg, Object... params) { + if (logger.isEnabledFor(p)) { + StringBuilder buf = new StringBuilder(); + toString(buf, msg, params); + String entry = buf.toString(); + log(entry); + logger.log(p, entry); + } else { + log(msg, params); + } + } + + protected void toString(StringBuilder buf, String msg, Object[] params) { + buf.append(msg); + if (params != null) { + buf.append(" - "); + int i = 0; + for (Object obj : params) { + buf.append('P').append(i).append('='); + buf.append(obj != null ? 
obj.toString() : "null"); + buf.append(", "); + } + buf.delete(buf.length() - 2, buf.length()); + } + } + + public String toString(String separator) { + StringBuilder buf = new StringBuilder(_name).append(": "); + for (Pair<String, Object[]> entry : _entries) { + toString(buf, entry.first(), entry.second()); + buf.append(separator); + } + return buf.toString(); + } + + @Override + public String toString() { + return toString("; "); + } + + public static class LogJournal extends Journal { + Logger _logger; + + public LogJournal(String name, Logger logger) { + super(name); + _logger = logger; + } + + @Override + public void record(String msg, Object... params) { + record(_logger, Level.DEBUG, msg, params); + } + } +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/LogUtils.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/LogUtils.java b/utils/src/main/java/com/cloud/utils/LogUtils.java new file mode 100644 index 0000000..d86766c --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/LogUtils.java @@ -0,0 +1,45 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// + +package com.cloud.utils; + +import java.io.File; + +import org.apache.log4j.Logger; +import org.apache.log4j.xml.DOMConfigurator; + +public class LogUtils { + public static final Logger s_logger = Logger.getLogger(LogUtils.class); + + public static void initLog4j(String log4jConfigFileName) { + assert (log4jConfigFileName != null); + File file = PropertiesUtil.findConfigFile(log4jConfigFileName); + if (file != null) { + s_logger.info("log4j configuration found at " + file.getAbsolutePath()); + DOMConfigurator.configureAndWatch(file.getAbsolutePath()); + } else { + String nameWithoutExtension = log4jConfigFileName.substring(0, log4jConfigFileName.lastIndexOf('.')); + file = PropertiesUtil.findConfigFile(nameWithoutExtension + ".properties"); + if (file != null) { + s_logger.info("log4j configuration found at " + file.getAbsolutePath()); + DOMConfigurator.configureAndWatch(file.getAbsolutePath()); + } + } + } +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/MethodCapturer.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/MethodCapturer.java b/utils/src/main/java/com/cloud/utils/MethodCapturer.java new file mode 100644 index 0000000..655a45d --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/MethodCapturer.java @@ -0,0 +1,113 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// + +package com.cloud.utils; + +import java.lang.reflect.Method; +import java.util.WeakHashMap; + +import net.sf.cglib.proxy.Callback; +import net.sf.cglib.proxy.CallbackFilter; +import net.sf.cglib.proxy.Enhancer; +import net.sf.cglib.proxy.MethodInterceptor; +import net.sf.cglib.proxy.MethodProxy; + +/* + * This helper class provides a way to retrieve Method in a strong-type way. It takes advantage of power of + * Intelligent IDE(Eclipse) in code-editing + * + * DummyImpl dummy = new DummyImpl(); + * MethodCapturer<DummyImpl> capturer = MethodCapturer.capture(dummy); + * Method method = capturer.get(capturer.instance().foo2()); + * + */ +public class MethodCapturer<T> { + + private final static int CACHE_SIZE = 1024; + + private T _instance; + private Method _method; + + private static WeakHashMap<Object, Object> s_cache = new WeakHashMap<Object, Object>(); + + private MethodCapturer() { + } + + @SuppressWarnings("unchecked") + public static <T> MethodCapturer<T> capture(T obj) { + synchronized (s_cache) { + MethodCapturer<T> capturer = (MethodCapturer<T>)s_cache.get(obj); + if (capturer != null) { + return capturer; + } + + final MethodCapturer<T> capturerNew = new MethodCapturer<T>(); + + Enhancer en = new Enhancer(); + en.setSuperclass(obj.getClass()); + en.setCallbacks(new Callback[] {new MethodInterceptor() { + @Override + public Object intercept(Object arg0, Method arg1, Object[] arg2, MethodProxy arg3) throws Throwable { + capturerNew.setMethod(arg1); + return null; + } + }, new MethodInterceptor() { + @Override + public Object 
intercept(Object arg0, Method arg1, Object[] arg2, MethodProxy arg3) throws Throwable { + return null; + } + }}); + en.setCallbackFilter(new CallbackFilter() { + @Override + public int accept(Method method) { + if (method.getParameterTypes().length == 0 && method.getName().equals("finalize")) { + return 1; + } + return 0; + } + }); + + capturerNew.setInstance((T)en.create()); + + // We expect MethodCapturer is only used for singleton objects here, so we only maintain a limited cache + // here + if (s_cache.size() < CACHE_SIZE) { + s_cache.put(obj, capturerNew); + } + + return capturerNew; + } + } + + public T instance() { + return _instance; + } + + private void setInstance(T instance) { + _instance = instance; + } + + public Method get(Object... useless) { + return _method; + } + + private void setMethod(Method method) { + _method = method; + } +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/NumbersUtil.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/NumbersUtil.java b/utils/src/main/java/com/cloud/utils/NumbersUtil.java new file mode 100644 index 0000000..8b93a40 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/NumbersUtil.java @@ -0,0 +1,139 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// + +package com.cloud.utils; + +import java.nio.ByteBuffer; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Formatter; +import java.util.Locale; + +import org.apache.commons.lang.math.NumberUtils; + +import com.cloud.utils.exception.CloudRuntimeException; + +public class NumbersUtil { + public static long parseLong(String s, long defaultValue) { + return NumberUtils.toLong(s, defaultValue); + } + + public static int parseInt(String s, int defaultValue) { + return NumberUtils.toInt(s, defaultValue); + } + + public static float parseFloat(String s, float defaultValue) { + return NumberUtils.toFloat(s, defaultValue); + } + + /** + * Converts bytes to long on input. + */ + public static long bytesToLong(byte b[]) { + return bytesToLong(b, 0); + } + + public static long bytesToLong(byte b[], int pos) { + return ByteBuffer.wrap(b, pos, 8).getLong(); + } + + /** + * Converts a byte array to a hex readable string. 
+ **/ + public static String bytesToString(byte[] data, int start, int end) { + StringBuilder buf = new StringBuilder(); + if (end > data.length) { + end = data.length; + } + for (int i = start; i < end; i++) { + buf.append(" "); + buf.append(Integer.toHexString(data[i] & 0xff)); + } + return buf.toString(); + } + + protected static final long KB = 1024; + protected static final long MB = 1024 * KB; + protected static final long GB = 1024 * MB; + protected static final long TB = 1024 * GB; + + public static String toReadableSize(long bytes) { + if (bytes < KB && bytes >= 0) { + return Long.toString(bytes) + " bytes"; + } + StringBuilder builder = new StringBuilder(); + Formatter format = new Formatter(builder, Locale.getDefault()); + if (bytes < MB) { + format.format("%.2f KB", (float)bytes / (float)KB); + } else if (bytes < GB) { + format.format("%.2f MB", (float)bytes / (float)MB); + } else if (bytes < TB) { + format.format("%.2f GB", (float)bytes / (float)GB); + } else { + format.format("%.4f TB", (float)bytes / (float)TB); + } + format.close(); + return builder.toString(); + } + + /** + * Converts a string of the format 'yy-MM-dd'T'HH:mm:ss.SSS" into ms. + * + * @param str containing the interval. + * @param defaultValue value to return if str doesn't parse. 
If -1, throws VmopsRuntimeException + * @return interval in ms + */ + public static long parseInterval(String str, long defaultValue) { + try { + if (str == null) { + throw new ParseException("String is wrong", 0); + } + + SimpleDateFormat sdf = null; + if (str.contains("D")) { + sdf = new SimpleDateFormat("dd'D'HH'h'mm'M'ss'S'SSS'ms'"); + } else if (str.contains("h")) { + sdf = new SimpleDateFormat("HH'h'mm'M'ss'S'SSS'ms'"); + } else if (str.contains("M")) { + sdf = new SimpleDateFormat("mm'M'ss'S'SSS'ms'"); + } else if (str.contains("S")) { + sdf = new SimpleDateFormat("ss'S'SSS'ms'"); + } else if (str.contains("ms")) { + sdf = new SimpleDateFormat("SSS'ms'"); + } + if (sdf == null) { + throw new ParseException("String is wrong", 0); + } + + Date date = sdf.parse(str); + return date.getTime(); + } catch (ParseException e) { + if (defaultValue != -1) { + return defaultValue; + } else { + throw new CloudRuntimeException("Unable to parse: " + str, e); + } + } + } + + public static int hash(long value) { + return (int)(value ^ (value >>> 32)); + } +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/Pair.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/Pair.java b/utils/src/main/java/com/cloud/utils/Pair.java new file mode 100644 index 0000000..73d3562 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/Pair.java @@ -0,0 +1,87 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
/**
 * Simple serializable 2-tuple. Mutable: both components can be replaced
 * after construction via first(T)/second(U)/set(T, U).
 */
public class Pair<T, U> implements Serializable {
    private static final long serialVersionUID = 2L;
    T t;
    U u;

    protected Pair() {

    }

    public Pair(T t, U u) {
        this.t = t;
        this.u = u;
    }

    public T first() {
        return t;
    }

    public U second() {
        return u;
    }

    /** Replaces the second component and returns the new value. */
    public U second(U value) {
        u = value;
        return u;
    }

    /** Replaces the first component and returns the new value. */
    public T first(T value) {
        t = value;
        return t;
    }

    public void set(T t, U u) {
        this.t = t;
        this.u = u;
    }

    // Note: any two pairs whose components are both null still collide, which
    // is unavoidable. The previous implementation OR'ed the two hashes
    // together, which saturates toward all-one bits and collides heavily;
    // the standard 31-multiplier combination distributes far better and
    // remains consistent with equals().
    @Override
    public int hashCode() {
        return 31 * (t != null ? t.hashCode() : 0) + (u != null ? u.hashCode() : 0);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Pair)) {
            return false;
        }
        Pair<?, ?> that = (Pair<?, ?>)obj;
        return (t != null ? t.equals(that.t) : that.t == null) && (u != null ? u.equals(that.u) : that.u == null);
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("P[");
        b.append((t != null) ? t.toString() : "null");
        b.append(":");
        b.append((u != null) ? u.toString() : "null");
        b.append("]");
        return b.toString();
    }
}
/**
 * Generates random passwords and preshared keys from SecureRandom.
 */
public class PasswordGenerator {
    //Leave out visually confusing l,L,1,o,O,0
    static private char[] lowerCase = new char[] {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
    static private char[] upperCase = new char[] {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z'};
    static private char[] numeric = new char[] {'2', '3', '4', '5', '6', '7', '8', '9'};

    // Bug fix: 'O' was present here even though the policy above (and the
    // upperCase array) deliberately exclude it as visually confusing.
    static private char[] alphaNumeric = new char[] {'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y',
        'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '2', '3', '4', '5', '6', '7', '8', '9'};

    static private int minLength = 3;

    /**
     * Generates a password of length {@code max(num, 3)}. For num >= 3 the
     * result starts with one lowercase letter, one uppercase letter and one
     * digit, followed by lowercase letters; shorter requests yield 3 random
     * alphanumeric characters.
     */
    public static String generateRandomPassword(int num) {
        Random r = new SecureRandom();
        StringBuilder password = new StringBuilder();

        //Guard for num < minLength
        if (num < minLength) {
            //Add alphanumeric chars at random
            for (int i = 0; i < minLength; i++) {
                password.append(generateAlphaNumeric(r));
            }
        } else {
            // Guarantee at least one lowercase char, one uppercase char and
            // one digit, then pad with lowercase chars.
            password.append(generateLowercaseChar(r)).append(generateUppercaseChar(r)).append(generateDigit(r));

            for (int i = 0; i < num - 3; i++) {
                password.append(generateLowercaseChar(r));
            }
        }

        return password.toString();
    }

    private static char generateLowercaseChar(Random r) {
        return lowerCase[r.nextInt(lowerCase.length)];
    }

    private static char generateDigit(Random r) {
        return numeric[r.nextInt(numeric.length)];
    }

    private static char generateUppercaseChar(Random r) {
        return upperCase[r.nextInt(upperCase.length)];
    }

    private static char generateAlphaNumeric(Random r) {
        return alphaNumeric[r.nextInt(alphaNumeric.length)];
    }

    /** Generates a preshared key of {@code numChars} random alphanumeric characters. */
    public static String generatePresharedKey(int numChars) {
        Random r = new SecureRandom();
        StringBuilder psk = new StringBuilder();
        for (int i = 0; i < numChars; i++) {
            psk.append(generateAlphaNumeric(r));
        }
        return psk.toString();

    }
}
/**
 * A parameterless boolean condition, typically polled by wait/retry loops.
 * Marked {@code @FunctionalInterface} so it can be supplied as a lambda.
 */
@FunctionalInterface
public interface Predicate {
    /** @return true when the condition currently holds */
    boolean checkCondition();
}
+// + +package com.cloud.utils; + +import java.io.File; +import java.io.IOException; +import java.util.Properties; + +import javax.naming.ConfigurationException; + +import org.apache.commons.io.FileUtils; +import org.apache.log4j.Logger; + +import com.cloud.utils.exception.CloudRuntimeException; +import com.cloud.utils.script.OutputInterpreter; +import com.cloud.utils.script.Script; + +public class ProcessUtil { + private static final Logger s_logger = Logger.getLogger(ProcessUtil.class.getName()); + + // paths cannot be hardcoded + public static void pidCheck(String pidDir, String run) throws ConfigurationException { + + String dir = pidDir == null ? "/var/run" : pidDir; + + try { + final File propsFile = PropertiesUtil.findConfigFile("environment.properties"); + if (propsFile == null) { + s_logger.debug("environment.properties could not be opened"); + } else { + final Properties props = PropertiesUtil.loadFromFile(propsFile); + dir = props.getProperty("paths.pid"); + if (dir == null) { + dir = pidDir == null ? "/var/run" : pidDir; + } + } + } catch (IOException e) { + s_logger.debug("environment.properties could not be opened"); + } + + final File pidFile = new File(dir + File.separator + run); + try { + if (!pidFile.createNewFile()) { + if (!pidFile.exists()) { + throw new ConfigurationException("Unable to write to " + pidFile.getAbsolutePath() + ". Are you sure you're running as root?"); + } + + final String pidLine = FileUtils.readFileToString(pidFile).trim(); + if (pidLine.isEmpty()) { + throw new ConfigurationException("Java process is being started twice. If this is not true, remove " + pidFile.getAbsolutePath()); + } + try { + final long pid = Long.parseLong(pidLine); + final Script script = new Script("bash", 120000, s_logger); + script.add("-c", "ps -p " + pid); + final String result = script.execute(); + if (result == null) { + throw new ConfigurationException("Java process is being started twice. 
If this is not true, remove " + pidFile.getAbsolutePath()); + } + if (!pidFile.delete()) { + throw new ConfigurationException("Java process is being started twice. If this is not true, remove " + pidFile.getAbsolutePath()); + } + if (!pidFile.createNewFile()) { + throw new ConfigurationException("Java process is being started twice. If this is not true, remove " + pidFile.getAbsolutePath()); + } + } catch (final NumberFormatException e) { + throw new ConfigurationException("Java process is being started twice. If this is not true, remove " + pidFile.getAbsolutePath()); + } + } + pidFile.deleteOnExit(); + + final Script script = new Script("bash", 120000, s_logger); + script.add("-c", "echo $PPID"); + final OutputInterpreter.OneLineParser parser = new OutputInterpreter.OneLineParser(); + script.execute(parser); + + final String pid = parser.getLine(); + + FileUtils.writeStringToFile(pidFile, pid + "\n"); + } catch (final IOException e) { + throw new CloudRuntimeException("Unable to create the " + pidFile.getAbsolutePath() + ". 
Are you running as root?", e); + } + } + + public static String dumpStack() { + StringBuilder sb = new StringBuilder(); + StackTraceElement[] elems = Thread.currentThread().getStackTrace(); + if (elems != null && elems.length > 0) { + for (StackTraceElement elem : elems) { + sb.append("\tat ").append(elem.getMethodName()).append("(").append(elem.getFileName()).append(":").append(elem.getLineNumber()).append(")\n"); + } + } + return sb.toString(); + } +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/Profiler.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/Profiler.java b/utils/src/main/java/com/cloud/utils/Profiler.java new file mode 100644 index 0000000..f8e44bd --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/Profiler.java @@ -0,0 +1,91 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// + +package com.cloud.utils; + +public class Profiler { + + private static final long MILLIS_FACTOR = 1000l; + private static final double EXPONENT = 2d; + + + private Long startTickNanoSeconds; + private Long stopTickNanoSeconds; + + public long start() { + startTickNanoSeconds = System.nanoTime(); + return startTickNanoSeconds; + } + + public long stop() { + stopTickNanoSeconds = System.nanoTime(); + return stopTickNanoSeconds; + } + + /** + * 1 millisecond = 1e+6 nanoseconds + * 1 second = 1000 milliseconds = 1e+9 nanoseconds + * + * @return the duration in nanoseconds. + */ + public long getDuration() { + if (startTickNanoSeconds != null && stopTickNanoSeconds != null) { + final long timeInNanoSeconds = stopTickNanoSeconds - startTickNanoSeconds; + return timeInNanoSeconds; + } + + return -1; + } + + /** + * 1 millisecond = 1e+6 nanoseconds + * 1 second = 1000 millisecond = 1e+9 nanoseconds + * + * @return the duration in milliseconds. + */ + public long getDurationInMillis() { + if (startTickNanoSeconds != null && stopTickNanoSeconds != null) { + final long timeInMillis = (stopTickNanoSeconds - startTickNanoSeconds) / (long)Math.pow(MILLIS_FACTOR, EXPONENT); + return timeInMillis; + } + + return -1; + } + + public boolean isStarted() { + return startTickNanoSeconds != null; + } + + public boolean isStopped() { + return stopTickNanoSeconds != null; + } + + @Override + public String toString() { + if (startTickNanoSeconds == null) { + return "Not Started"; + } + + if (stopTickNanoSeconds == null) { + return "Started but not stopped"; + } + + return "Done. 
Duration: " + getDurationInMillis() + "ms"; + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/PropertiesUtil.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/PropertiesUtil.java b/utils/src/main/java/com/cloud/utils/PropertiesUtil.java new file mode 100644 index 0000000..4cb89f7 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/PropertiesUtil.java @@ -0,0 +1,196 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// + +package com.cloud.utils; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import org.apache.log4j.Logger; + +public class PropertiesUtil { + private static final Logger s_logger = Logger.getLogger(PropertiesUtil.class); + + /** + * Searches the class path and local paths to find the config file. + * @param path path to find. if it starts with / then it's absolute path. + * @return File or null if not found at all. 
+ */ + + public static File findConfigFile(String path) { + ClassLoader cl = PropertiesUtil.class.getClassLoader(); + URL url = cl.getResource(path); + + if (url != null && "file".equals(url.getProtocol())) { + return new File(url.getFile()); + } + + url = ClassLoader.getSystemResource(path); + if (url != null && "file".equals(url.getProtocol())) { + return new File(url.getFile()); + } + + File file = new File(path); + if (file.exists()) { + return file; + } + + String newPath = "conf" + (path.startsWith(File.separator) ? "" : "/") + path; + url = ClassLoader.getSystemResource(newPath); + if (url != null && "file".equals(url.getProtocol())) { + return new File(url.getFile()); + } + + url = cl.getResource(newPath); + if (url != null && "file".equals(url.getProtocol())) { + return new File(url.getFile()); + } + + newPath = "conf" + (path.startsWith(File.separator) ? "" : File.separator) + path; + file = new File(newPath); + if (file.exists()) { + return file; + } + + newPath = System.getProperty("catalina.home"); + if (newPath == null) { + newPath = System.getenv("CATALINA_HOME"); + } + + if (newPath == null) { + newPath = System.getenv("CATALINA_BASE"); + } + + if (newPath == null) { + return null; + } + + file = new File(newPath + File.separator + "conf" + File.separator + path); + if (file.exists()) { + return file; + } + + return null; + } + + public static Map<String, Object> toMap(Properties props) { + Set<String> names = props.stringPropertyNames(); + HashMap<String, Object> map = new HashMap<String, Object>(names.size()); + for (String name : names) { + map.put(name, props.getProperty(name)); + } + + return map; + } + + /* + * Returns an InputStream for the given resource + * This is needed to read the files within a jar in classpath. 
+ */ + public static InputStream openStreamFromURL(String path) { + ClassLoader cl = PropertiesUtil.class.getClassLoader(); + URL url = cl.getResource(path); + if (url != null) { + try { + InputStream stream = url.openStream(); + return stream; + } catch (IOException ioex) { + return null; + } + } + return null; + } + + public static void loadFromJar(Properties properties, String configFile) throws IOException { + InputStream stream = PropertiesUtil.openStreamFromURL(configFile); + if (stream != null) { + properties.load(stream); + } else { + s_logger.error("Unable to find properties file: " + configFile); + } + } + + // Returns key=value pairs by parsing a commands.properties/config file + // with syntax; key=cmd;value (with this syntax cmd is stripped) and key=value + public static Map<String, String> processConfigFile(String[] configFiles) { + Map<String, String> configMap = new HashMap<String, String>(); + Properties preProcessedCommands = new Properties(); + for (String configFile : configFiles) { + File commandsFile = findConfigFile(configFile); + if (commandsFile != null) { + try { + loadFromFile(preProcessedCommands, commandsFile); + } catch (IOException ioe) { + s_logger.error("IO Exception loading properties file", ioe); + } + } + else { + // in case of a file within a jar in classpath, try to open stream using url + try { + loadFromJar(preProcessedCommands, configFile); + } catch (IOException e) { + s_logger.error("IO Exception loading properties file from jar", e); + } + } + } + + for (Object key : preProcessedCommands.keySet()) { + String preProcessedCommand = preProcessedCommands.getProperty((String)key); + int splitIndex = preProcessedCommand.lastIndexOf(";"); + String value = preProcessedCommand.substring(splitIndex + 1); + configMap.put((String)key, value); + } + + return configMap; + } + + /** + * Load a Properties object with contents from a File. 
+ * @param properties the properties object to be loaded + * @param file the file to load from + * @throws IOException + */ + public static void loadFromFile(final Properties properties, final File file) + throws IOException { + try (final InputStream stream = new FileInputStream(file)) { + properties.load(stream); + } + } + + /** + * Load the file and return the contents as a Properties object. + * @param file the file to load + * @return A Properties object populated + * @throws IOException + */ + public static Properties loadFromFile(final File file) + throws IOException { + final Properties properties = new Properties(); + loadFromFile(properties, file); + return properties; + } + +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/ReflectUtil.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/ReflectUtil.java b/utils/src/main/java/com/cloud/utils/ReflectUtil.java new file mode 100644 index 0000000..c8ae954 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/ReflectUtil.java @@ -0,0 +1,213 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// + +package com.cloud.utils; + +import static java.beans.Introspector.getBeanInfo; +import static java.util.Collections.emptyList; +import static java.util.Collections.unmodifiableList; + +import java.beans.BeanInfo; +import java.beans.IntrospectionException; +import java.beans.PropertyDescriptor; +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.log4j.Logger; +import org.reflections.Reflections; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.util.ClasspathHelper; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.TypeAnnotationsScanner; + +import com.google.common.collect.ImmutableSet; + +import com.cloud.utils.exception.CloudRuntimeException; + +public class ReflectUtil { + + private static final Logger s_logger = Logger.getLogger(ReflectUtil.class); + private static final Logger logger = Logger.getLogger(Reflections.class); + + public static Pair<Class<?>, Field> getAnyField(Class<?> clazz, String fieldName) { + try { + return new Pair<Class<?>, Field>(clazz, clazz.getDeclaredField(fieldName)); + } catch (SecurityException e) { + throw new CloudRuntimeException("How the heck?", e); + } catch (NoSuchFieldException e) { + // Do I really want this? No I don't but what can I do? It only throws the NoSuchFieldException. + Class<?> parent = clazz.getSuperclass(); + if (parent != null) { + return getAnyField(parent, fieldName); + } + return null; + } + } + + // Gets all classes with some annotation from a package + public static Set<Class<?>> getClassesWithAnnotation(Class<? 
extends Annotation> annotation, String[] packageNames) { + Reflections reflections; + Set<Class<?>> classes = new HashSet<Class<?>>(); + ConfigurationBuilder builder=new ConfigurationBuilder(); + for (String packageName : packageNames) { + builder.addUrls(ClasspathHelper.forPackage(packageName)); + } + builder.setScanners(new SubTypesScanner(),new TypeAnnotationsScanner()); + reflections = new Reflections(builder); + classes.addAll(reflections.getTypesAnnotatedWith(annotation)); + return classes; + } + + // Checks against posted search classes if cmd is async + public static boolean isCmdClassAsync(Class<?> cmdClass, Class<?>[] searchClasses) { + boolean isAsync = false; + Class<?> superClass = cmdClass; + + while (superClass != null && superClass != Object.class) { + String superName = superClass.getName(); + for (Class<?> baseClass : searchClasses) { + if (superName.equals(baseClass.getName())) { + isAsync = true; + break; + } + } + if (isAsync) + break; + superClass = superClass.getSuperclass(); + } + return isAsync; + } + + // Returns all fields until a base class for a cmd class + public static List<Field> getAllFieldsForClass(Class<?> cmdClass, Class<?> baseClass) { + List<Field> fields = new ArrayList<Field>(); + Collections.addAll(fields, cmdClass.getDeclaredFields()); + Class<?> superClass = cmdClass.getSuperclass(); + while (baseClass.isAssignableFrom(superClass) && baseClass != superClass) { + Field[] superClassFields = superClass.getDeclaredFields(); + if (superClassFields != null) + Collections.addAll(fields, superClassFields); + superClass = superClass.getSuperclass(); + } + return fields; + } + + /** + * Returns all unique fields except excludeClasses for a cmd class + * @param cmdClass the class in which fields should be collected + * @param excludeClasses the classes whose fields must be ignored + * @return list of fields + */ + public static Set<Field> getAllFieldsForClass(Class<?> cmdClass, Class<?>[] excludeClasses) { + Set<Field> fields = new 
HashSet<Field>(); + Collections.addAll(fields, cmdClass.getDeclaredFields()); + Class<?> superClass = cmdClass.getSuperclass(); + + while (superClass != null && superClass != Object.class) { + String superName = superClass.getName(); + boolean isNameEqualToSuperName = false; + for (Class<?> baseClass : excludeClasses) { + if (superName.equals(baseClass.getName())) { + isNameEqualToSuperName = true; + } + } + + if (!isNameEqualToSuperName) { + Field[] superClassFields = superClass.getDeclaredFields(); + if (superClassFields != null) { + Collections.addAll(fields, superClassFields); + } + } + superClass = superClass.getSuperclass(); + } + return fields; + } + + public static List<String> flattenProperties(final Object target, final Class<?> clazz) { + return flattenPropeties(target, clazz, "class"); + } + + public static List<String> flattenPropeties(final Object target, final Class<?> clazz, final String... excludedProperties) { + return flattenProperties(target, clazz, ImmutableSet.copyOf(excludedProperties)); + } + + private static List<String> flattenProperties(final Object target, final Class<?> clazz, final ImmutableSet<String> excludedProperties) { + + assert clazz != null; + + if (target == null) { + return emptyList(); + } + + assert clazz.isAssignableFrom(target.getClass()); + + try { + + final BeanInfo beanInfo = getBeanInfo(clazz); + final PropertyDescriptor[] descriptors = beanInfo.getPropertyDescriptors(); + + final List<String> serializedProperties = new ArrayList<String>(); + for (final PropertyDescriptor descriptor : descriptors) { + + if (excludedProperties.contains(descriptor.getName())) { + continue; + } + + serializedProperties.add(descriptor.getName()); + final Object value = descriptor.getReadMethod().invoke(target); + serializedProperties.add(value != null ? 
value.toString() : "null"); + + } + + return unmodifiableList(serializedProperties); + + } catch (IntrospectionException e) { + s_logger.warn("Ignored IntrospectionException when serializing class " + target.getClass().getCanonicalName(), e); + } catch (IllegalArgumentException e) { + s_logger.warn("Ignored IllegalArgumentException when serializing class " + target.getClass().getCanonicalName(), e); + } catch (IllegalAccessException e) { + s_logger.warn("Ignored IllegalAccessException when serializing class " + target.getClass().getCanonicalName(), e); + } catch (InvocationTargetException e) { + s_logger.warn("Ignored InvocationTargetException when serializing class " + target.getClass().getCanonicalName(), e); + } + + return emptyList(); + + } + + public static String getEntityName(Class clz){ + if(clz == null) + return null; + + String entityName = clz.getName(); + int index = entityName.lastIndexOf("."); + if (index != -1) { + return entityName.substring(index + 1); + }else{ + return entityName; + } + } + +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/ReflectionUse.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/ReflectionUse.java b/utils/src/main/java/com/cloud/utils/ReflectionUse.java new file mode 100644 index 0000000..a5a78e2 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/ReflectionUse.java @@ -0,0 +1,31 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// + +package com.cloud.utils; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +@Target({METHOD}) +@Retention(RUNTIME) +public @interface ReflectionUse { +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/S3Utils.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/S3Utils.java b/utils/src/main/java/com/cloud/utils/S3Utils.java new file mode 100644 index 0000000..6efe76b --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/S3Utils.java @@ -0,0 +1,603 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// + +package com.cloud.utils; + +import static com.amazonaws.Protocol.HTTP; +import static com.amazonaws.Protocol.HTTPS; +import static com.cloud.utils.StringUtils.join; +import static java.io.File.createTempFile; +import static java.lang.String.format; +import static java.lang.System.currentTimeMillis; +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableList; +import static org.apache.commons.lang.ArrayUtils.isEmpty; +import static org.apache.commons.lang.StringUtils.isBlank; +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.lang.ArrayUtils; +import org.apache.log4j.Logger; + +import com.amazonaws.AmazonClientException; +import com.amazonaws.ClientConfiguration; +import com.amazonaws.HttpMethod; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.GetObjectRequest; +import com.amazonaws.services.s3.model.ListObjectsRequest; +import com.amazonaws.services.s3.model.ObjectListing; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.S3Object; +import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.Upload; +import 
com.cloud.utils.exception.CloudRuntimeException; + +public final class S3Utils { + + private static final Logger LOGGER = Logger.getLogger(S3Utils.class); + + public static final String SEPARATOR = "/"; + + private static final int MIN_BUCKET_NAME_LENGTH = 3; + private static final int MAX_BUCKET_NAME_LENGTH = 63; + + private S3Utils() { + super(); + } + + public static AmazonS3 acquireClient(final ClientOptions clientOptions) { + + final AWSCredentials credentials = new BasicAWSCredentials(clientOptions.getAccessKey(), clientOptions.getSecretKey()); + + final ClientConfiguration configuration = new ClientConfiguration(); + + if (clientOptions.isHttps() != null) { + configuration.setProtocol(clientOptions.isHttps() == true ? HTTPS : HTTP); + } + + if (clientOptions.getConnectionTimeout() != null) { + configuration.setConnectionTimeout(clientOptions.getConnectionTimeout()); + } + + if (clientOptions.getMaxErrorRetry() != null) { + configuration.setMaxErrorRetry(clientOptions.getMaxErrorRetry()); + } + + if (clientOptions.getSocketTimeout() != null) { + configuration.setSocketTimeout(clientOptions.getSocketTimeout()); + } + + if (clientOptions.getUseTCPKeepAlive() != null) { + //configuration.setUseTcpKeepAlive(clientOptions.getUseTCPKeepAlive()); + LOGGER.debug("useTCPKeepAlive not supported by old AWS SDK"); + } + + if (clientOptions.getConnectionTtl() != null) { + //configuration.setConnectionTTL(clientOptions.getConnectionTtl()); + LOGGER.debug("connectionTtl not supported by old AWS SDK"); + } + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Creating S3 client with configuration: [protocol: %1$s, connectionTimeOut: " + "%2$s, maxErrorRetry: %3$s, socketTimeout: %4$s, useTCPKeepAlive: %5$s, connectionTtl: %6$s]", + configuration.getProtocol(), configuration.getConnectionTimeout(), configuration.getMaxErrorRetry(), configuration.getSocketTimeout(), + -1, -1)); + } + + final AmazonS3Client client = new AmazonS3Client(credentials, configuration); + + if 
(isNotBlank(clientOptions.getEndPoint())) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Setting the end point for S3 client %1$s to %2$s.", client, clientOptions.getEndPoint())); + } + client.setEndpoint(clientOptions.getEndPoint()); + } + + return client; + + } + + public static void putFile(final ClientOptions clientOptions, final File sourceFile, final String bucketName, final String key) { + + assert clientOptions != null; + assert sourceFile != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Sending file %1$s as S3 object %2$s in " + "bucket %3$s", sourceFile.getName(), key, bucketName)); + } + + acquireClient(clientOptions).putObject(bucketName, key, sourceFile); + + } + + public static void putObject(final ClientOptions clientOptions, final InputStream sourceStream, final String bucketName, final String key) { + + assert clientOptions != null; + assert sourceStream != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Sending stream as S3 object %1$s in " + "bucket %2$s", key, bucketName)); + } + + acquireClient(clientOptions).putObject(bucketName, key, sourceStream, null); + + } + + public static void putObject(final ClientOptions clientOptions, final PutObjectRequest req) { + + assert clientOptions != null; + assert req != null; + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Sending stream as S3 object using PutObjectRequest")); + } + + acquireClient(clientOptions).putObject(req); + + } + + // multi-part upload file + public static void mputFile(final ClientOptions clientOptions, final File sourceFile, final String bucketName, final String key) throws InterruptedException { + + assert clientOptions != null; + assert sourceFile != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Multipart sending file %1$s as S3 object %2$s in " 
+ "bucket %3$s", sourceFile.getName(), key, bucketName)); + } + TransferManager tm = new TransferManager(S3Utils.acquireClient(clientOptions)); + Upload upload = tm.upload(bucketName, key, sourceFile); + upload.waitForCompletion(); + } + + // multi-part upload object + public static void mputObject(final ClientOptions clientOptions, final InputStream sourceStream, final String bucketName, final String key) + throws InterruptedException { + + assert clientOptions != null; + assert sourceStream != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Multipart sending stream as S3 object %1$s in " + "bucket %2$s", key, bucketName)); + } + TransferManager tm = new TransferManager(S3Utils.acquireClient(clientOptions)); + Upload upload = tm.upload(bucketName, key, sourceStream, null); + upload.waitForCompletion(); + } + + // multi-part upload object + public static void mputObject(final ClientOptions clientOptions, final PutObjectRequest req) throws InterruptedException { + + assert clientOptions != null; + assert req != null; + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Multipart sending object to S3 using PutObjectRequest"); + } + TransferManager tm = new TransferManager(S3Utils.acquireClient(clientOptions)); + Upload upload = tm.upload(req); + upload.waitForCompletion(); + + } + + public static void setObjectAcl(final ClientOptions clientOptions, final String bucketName, final String key, final CannedAccessControlList acl) { + + assert clientOptions != null; + assert acl != null; + + acquireClient(clientOptions).setObjectAcl(bucketName, key, acl); + + } + + public static URL generatePresignedUrl(final ClientOptions clientOptions, final String bucketName, final String key, final Date expiration) { + + assert clientOptions != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + return acquireClient(clientOptions).generatePresignedUrl(bucketName, key, expiration, HttpMethod.GET); + + } + 
+ // Note that whenever S3Object is returned, client code needs to close the internal stream to avoid resource leak. + public static S3Object getObject(final ClientOptions clientOptions, final String bucketName, final String key) { + + assert clientOptions != null; + assert !isBlank(bucketName); + assert !isBlank(key); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Get S3 object %1$s in " + "bucket %2$s", key, bucketName)); + } + + return acquireClient(clientOptions).getObject(bucketName, key); + + } + + @SuppressWarnings("unchecked") + public static File getFile(final ClientOptions clientOptions, final String bucketName, final String key, final File targetDirectory, + final FileNamingStrategy namingStrategy) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + assert isNotBlank(key); + assert targetDirectory != null && targetDirectory.isDirectory(); + assert namingStrategy != null; + + final AmazonS3 connection = acquireClient(clientOptions); + + File tempFile = null; + try { + + tempFile = createTempFile(join("-", targetDirectory.getName(), currentTimeMillis(), "part"), "tmp", targetDirectory); + tempFile.deleteOnExit(); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Downloading object %1$s from bucket %2$s to temp file %3$s", key, bucketName, tempFile.getName())); + } + + try { + connection.getObject(new GetObjectRequest(bucketName, key), tempFile); + } catch (AmazonClientException ex) { + // hack to handle different ETAG format generated from RiakCS for multi-part uploaded object + String msg = ex.getMessage(); + if (!msg.contains("verify integrity")) { + throw ex; + } + } + + final File targetFile = new File(targetDirectory, namingStrategy.determineFileName(key)); + tempFile.renameTo(targetFile); + + return targetFile; + + } catch (FileNotFoundException e) { + + throw new CloudRuntimeException(format("Failed open file %1$s in order to get object %2$s from bucket %3$s.", targetDirectory.getAbsoluteFile(), bucketName, + 
key), e); + + } catch (IOException e) { + + throw new CloudRuntimeException(format("Unable to allocate temporary file in directory %1$s to download %2$s:%3$s from S3", + targetDirectory.getAbsolutePath(), bucketName, key), e); + + } finally { + + if (tempFile != null) { + tempFile.delete(); + } + + } + + } + + public static List<File> getDirectory(final ClientOptions clientOptions, final String bucketName, final String sourcePath, final File targetDirectory, + final FileNamingStrategy namingStrategy) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + assert isNotBlank(sourcePath); + assert targetDirectory != null; + + final AmazonS3 connection = acquireClient(clientOptions); + + // List the objects in the source directory on S3 + final List<S3ObjectSummary> objectSummaries = listDirectory(bucketName, sourcePath, connection); + final List<File> files = new ArrayList<File>(); + + for (final S3ObjectSummary objectSummary : objectSummaries) { + + files.add(getFile(clientOptions, bucketName, objectSummary.getKey(), targetDirectory, namingStrategy)); + + } + + return unmodifiableList(files); + + } + + public static List<S3ObjectSummary> getDirectory(final ClientOptions clientOptions, final String bucketName, final String sourcePath) { + assert clientOptions != null; + assert isNotBlank(bucketName); + assert isNotBlank(sourcePath); + + final AmazonS3 connection = acquireClient(clientOptions); + + // List the objects in the source directory on S3 + return listDirectory(bucketName, sourcePath, connection); + } + + private static List<S3ObjectSummary> listDirectory(final String bucketName, final String directory, final AmazonS3 client) { + + List<S3ObjectSummary> objects = new ArrayList<S3ObjectSummary>(); + ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName).withPrefix(directory + SEPARATOR); + + ObjectListing ol = client.listObjects(listObjectsRequest); + if(ol.isTruncated()) { + do { + 
objects.addAll(ol.getObjectSummaries()); + listObjectsRequest.setMarker(ol.getNextMarker()); + ol = client.listObjects(listObjectsRequest); + } while (ol.isTruncated()); + } + else { + objects.addAll(ol.getObjectSummaries()); + } + + if (objects.isEmpty()) { + return emptyList(); + } + + return unmodifiableList(objects); + } + + public static void putDirectory(final ClientOptions clientOptions, final String bucketName, final File directory, final FilenameFilter fileNameFilter, + final ObjectNamingStrategy namingStrategy) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + assert directory != null && directory.isDirectory(); + assert fileNameFilter != null; + assert namingStrategy != null; + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Putting directory %1$s in S3 bucket %2$s.", directory.getAbsolutePath(), bucketName)); + } + + // Determine the list of files to be sent using the passed filter ... + final File[] files = directory.listFiles(fileNameFilter); + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(format("Putting files (%1$s) in S3 bucket %2$s.", ArrayUtils.toString(files, "no files found"), bucketName)); + } + + // Skip spinning up an S3 connection when no files will be sent ... + if (isEmpty(files)) { + return; + } + + final AmazonS3 client = acquireClient(clientOptions); + + // Send the files to S3 using the passed ObjectNaming strategy to + // determine the key ... 
+ for (final File file : files) { + final String key = namingStrategy.determineKey(file); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(format("Putting file %1$s into bucket %2$s with key %3$s.", file.getAbsolutePath(), bucketName, key)); + } + client.putObject(bucketName, key, file); + } + + } + + public static void deleteObject(final ClientOptions clientOptions, final String bucketName, final String key) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + assert isNotBlank(key); + + final AmazonS3 client = acquireClient(clientOptions); + + client.deleteObject(bucketName, key); + + } + + public static void deleteDirectory(final ClientOptions clientOptions, final String bucketName, final String directoryName) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + assert isNotBlank(directoryName); + + final AmazonS3 client = acquireClient(clientOptions); + + final List<S3ObjectSummary> objects = listDirectory(bucketName, directoryName, client); + + for (final S3ObjectSummary object : objects) { + + client.deleteObject(bucketName, object.getKey()); + + } + + client.deleteObject(bucketName, directoryName); + + } + + public static boolean canConnect(final ClientOptions clientOptions) { + + try { + + acquireClient(clientOptions); + return true; + + } catch (AmazonClientException e) { + + LOGGER.warn("Ignored Exception while checking connection options", e); + return false; + + } + + } + + public static boolean doesBucketExist(final ClientOptions clientOptions, final String bucketName) { + + assert clientOptions != null; + assert !isBlank(bucketName); + + try { + + final List<Bucket> buckets = acquireClient(clientOptions).listBuckets(); + + for (Bucket bucket : buckets) { + if (bucket.getName().equals(bucketName)) { + return true; + } + } + + return false; + + } catch (AmazonClientException e) { + + LOGGER.warn("Ignored Exception while checking bucket existence", e); + return false; + + } + + } + + public static boolean 
canReadWriteBucket(final ClientOptions clientOptions, final String bucketName) { + + assert clientOptions != null; + assert isNotBlank(bucketName); + + try { + + final AmazonS3 client = acquireClient(clientOptions); + + final String fileContent = "testing put and delete"; + final InputStream inputStream = new ByteArrayInputStream(fileContent.getBytes()); + final String key = UUID.randomUUID().toString() + ".txt"; + + final ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(fileContent.length()); + + client.putObject(bucketName, key, inputStream, metadata); + client.deleteObject(bucketName, key); + + return true; + + } catch (AmazonClientException e) { + + return false; + + } + + } + + public static List<String> checkClientOptions(ClientOptions clientOptions) { + + assert clientOptions != null; + + List<String> errorMessages = new ArrayList<String>(); + + errorMessages.addAll(checkRequiredField("access key", clientOptions.getAccessKey())); + errorMessages.addAll(checkRequiredField("secret key", clientOptions.getSecretKey())); + + errorMessages.addAll(checkOptionalField("connection timeout", clientOptions.getConnectionTimeout())); + errorMessages.addAll(checkOptionalField("socket timeout", clientOptions.getSocketTimeout())); + errorMessages.addAll(checkOptionalField("max error retries", clientOptions.getMaxErrorRetry())); + errorMessages.addAll(checkOptionalField("connection ttl", clientOptions.getConnectionTtl())); + + return unmodifiableList(errorMessages); + + } + + public static List<String> checkBucketName(final String bucketLabel, final String bucket) { + + assert isNotBlank(bucketLabel); + assert isNotBlank(bucket); + + final List<String> errorMessages = new ArrayList<String>(); + + if (bucket.length() < MIN_BUCKET_NAME_LENGTH) { + errorMessages.add(format("The length of %1$s " + "for the %2$s must have a length of at least %3$s " + "characters", bucket, bucketLabel, + MIN_BUCKET_NAME_LENGTH)); + } + + if (bucket.length() > 
MAX_BUCKET_NAME_LENGTH) { + errorMessages.add(format("The length of %1$s " + "for the %2$s must not have a length of at greater" + " than %3$s characters", bucket, bucketLabel, + MAX_BUCKET_NAME_LENGTH)); + } + + return unmodifiableList(errorMessages); + + } + + private static List<String> checkOptionalField(final String fieldName, final Integer fieldValue) { + if (fieldValue != null && fieldValue < 0) { + return singletonList(format("The value of %1$s must " + "be greater than zero.", fieldName)); + } + return emptyList(); + } + + private static List<String> checkRequiredField(String fieldName, String fieldValue) { + if (isBlank(fieldValue)) { + return singletonList(format("A %1$s must be specified.", fieldName)); + } + return emptyList(); + } + + public interface ClientOptions { + + String getAccessKey(); + + String getSecretKey(); + + String getEndPoint(); + + Boolean isHttps(); + + Integer getConnectionTimeout(); + + Integer getMaxErrorRetry(); + + Integer getSocketTimeout(); + + Boolean getUseTCPKeepAlive(); + + Integer getConnectionTtl(); + } + + public interface ObjectNamingStrategy { + + String determineKey(File file); + + } + + public interface FileNamingStrategy { + + String determineFileName(String key); + + } + +} http://git-wip-us.apache.org/repos/asf/cloudstack/blob/83fd8f60/utils/src/main/java/com/cloud/utils/SerialVersionUID.java ---------------------------------------------------------------------- diff --git a/utils/src/main/java/com/cloud/utils/SerialVersionUID.java b/utils/src/main/java/com/cloud/utils/SerialVersionUID.java new file mode 100644 index 0000000..e4ea217 --- /dev/null +++ b/utils/src/main/java/com/cloud/utils/SerialVersionUID.java @@ -0,0 +1,69 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
// The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
//

package com.cloud.utils;

/**
 * Central registry of serialVersionUID values for CloudStack's serializable
 * classes, kept in one place for uniqueness and record-keeping
 * purposes.  This is purely on an honor system though.  You should always
 * take the next unused offset from this file when adding a new class.
 *
 * Gaps in the sequence (0x5 is followed by 0x7; 0x9 by 0xb) presumably belong
 * to retired classes — do not reuse them without verifying.
 */
public interface SerialVersionUID {
    // 0x564D4F70 is the ASCII string "VMOp" ('V'=0x56, 'M'=0x4D, 'O'=0x4F, 'p'=0x70),
    // placed in the high 32 bits so the low word is free for per-class offsets.
    // BUGFIX: the original expression was "0x564D4F70 << 32" — an int shift, whose
    // distance is masked to 5 bits (JLS 15.19), making the shift a no-op and leaving
    // Base == 0x564D4F70L. The literal must be long BEFORE shifting. Note that any
    // streams serialized with the old (unshifted) UIDs are incompatible with the fix.
    public static final long Base = 0x564D4F70L << 32;

    public static final long UUID = Base | 0x1;
    public static final long CloudRuntimeException = Base | 0x2;
    public static final long CloudStartupServlet = Base | 0x3;
    public static final long CloudServiceImpl = Base | 0x4;
    public static final long AccountLimitException = Base | 0x5;
    public static final long InsufficientVirtualNetworkCapacityException = Base | 0x7;
    public static final long NetworkUnavailableException = Base | 0x8;
    public static final long Ip = Base | 0x9;
    public static final long UnsupportedVersionException = Base | 0xb;
    public static final long DataCenterIpAddressPK = Base | 0xc;
    public static final long UnableToExecuteException = Base | 0xd;
    public static final long ExecutionException = Base | 0xe;
    public static final long VnetKey = Base | 0xf;
    public static final long InsufficientServerCapacityException = Base | 0x10;
    public static final long InsufficientAddressCapacityException = Base | 0x11;
    public static final long ManagementServerException = Base | 0x12;
    public static final long HAStateException = Base | 0x13;
    public static final long InsufficientStorageCapacityException = Base | 0x14;
    public static final long InsufficientCapacityException = Base | 0x15;
    public static final long ConcurrentOperationException = Base | 0x16;
    public static final long AgentUnavailableException = Base | 0x17;
    public static final long OperationTimedoutException = Base | 0x18;
    public static final long StorageUnavailableException = Base | 0x19;
    // Name is misspelled ("Infficient") [sic] — kept for source compatibility with callers.
    public static final long InfficientVirtualNetworkCapacityException = Base | 0x1a;
    public static final long DiscoveryException = Base | 0x1b;
    public static final long ConflictingNetworkSettingException = Base | 0x1c;
    public static final long CloudAuthenticationException = Base | 0x1d;
    public static final long AsyncCommandQueued = Base | 0x1e;
    public static final long ResourceUnavailableException = Base | 0x1f;
    public static final long ConnectionException = Base | 0x20;
    public static final long PermissionDeniedException = Base | 0x21;
    // Lower-case name [sic] — kept for source compatibility with callers.
    public static final long sshException = Base | 0x22;
    public static final long HttpCallException = Base | 0x23;
    public static final long VirtualMachineMigrationException = Base | 0x24;
    public static final long DiscoveredWithErrorException = Base | 0x25;
    public static final long NoTransitionException = Base | 0x26;
    public static final long CloudExecutionException = Base | 0x27;
    public static final long CallFailedException = Base | 0x28;
    public static final long UnableDeleteHostException = Base | 0x29;
    public static final long AffinityConflictException = Base | 0x2a;
    public static final long JobCancellationException = Base | 0x2b;
}