Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/7943#discussion_r36773859
  
    --- Diff: network/shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java ---
    @@ -252,4 +338,118 @@ public String toString() {
             .toString();
         }
       }
    +
    +  static ObjectMapper mapper = new ObjectMapper();
    +
    +  private static byte[] dbAppExecKey(AppExecId appExecId) throws IOException {
    +    // we stick a common prefix on all the keys so we can find them in the DB
    +    String appExecJson = mapper.writeValueAsString(appExecId);
    +    String key = (APP_KEY_PREFIX + ";" + appExecJson);
    +    return key.getBytes(Charsets.UTF_8);
    +  }
    +
    +  private static AppExecId parseDbAppExecKey(String s) throws IOException {
    +    int p = s.indexOf(';');
    +    String json = s.substring(p + 1);
    +    AppExecId parsed = mapper.readValue(json, AppExecId.class);
    +    return parsed;
    +  }
    +
    +  private static final String APP_KEY_PREFIX = "AppExecShuffleInfo";
    +
    +  @VisibleForTesting
    +  static ConcurrentMap<AppExecId, ExecutorShuffleInfo> reloadRegisteredExecutors(DB db)
    +      throws IOException {
    +    ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = Maps.newConcurrentMap();
    +    if (db != null) {
    +      DBIterator itr = db.iterator();
    +      itr.seek(APP_KEY_PREFIX.getBytes(Charsets.UTF_8));
    +      while (itr.hasNext()) {
    +        Map.Entry<byte[], byte[]> e = itr.next();
    +        String key = new String(e.getKey(), Charsets.UTF_8);
    +        if (!key.startsWith(APP_KEY_PREFIX))
    +          break;
    +        AppExecId id = parseDbAppExecKey(key);
    +        ExecutorShuffleInfo shuffleInfo =
    +          mapper.readValue(new String(e.getValue(), Charsets.UTF_8), ExecutorShuffleInfo.class);
    +        registeredExecutors.put(id, shuffleInfo);
    +      }
    +    }
    +    return registeredExecutors;
    +  }
    +
    +  private static class LevelDBLogger implements org.iq80.leveldb.Logger {
    +    private static final Logger LOG = LoggerFactory.getLogger(LevelDBLogger.class);
    +
    +    @Override
    +    public void log(String message) {
    +      LOG.info(message);
    +    }
    +  }
    +
    +  private static final StoreVersion CURRENT_VERSION = new StoreVersion(1,0);
    +  private static void checkVersion(DB db) throws IOException {
    +    byte[] bytes = db.get(StoreVersion.KEY);
    +    if (bytes == null) {
    +      storeVersion(db);
    +    } else if (bytes.length != 8) {
    +      throw new IOException("unexpected version format");
    +    } else {
    +      DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    +      int major = in.readInt();
    --- End diff ---
    
    Any reason not to use JSON for this too?
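
    For illustration only, a hypothetical sketch (not the PR's code) of what storing
    the version as JSON might look like, reusing the same Jackson mapper as the
    AppExecId keys and assuming StoreVersion is Jackson-serializable with readable
    major/minor fields:

        // Sketch: persist StoreVersion as JSON rather than two raw 4-byte ints.
        // Assumes the enclosing class's mapper and CURRENT_VERSION, and that
        // StoreVersion exposes major/minor and can round-trip through Jackson.
        private static void checkVersion(DB db) throws IOException {
          byte[] bytes = db.get(StoreVersion.KEY);
          if (bytes == null) {
            // first run: record the current version
            db.put(StoreVersion.KEY, mapper.writeValueAsBytes(CURRENT_VERSION));
          } else {
            StoreVersion version = mapper.readValue(bytes, StoreVersion.class);
            if (version.major != CURRENT_VERSION.major) {
              throw new IOException("incompatible state DB version " + version.major
                + "." + version.minor + ", expected major version " + CURRENT_VERSION.major);
            }
          }
        }

    The JSON form is a few bytes larger than the fixed 8-byte encoding, but it is
    self-describing and keeps every value in the DB readable the same way.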

