sammccall created this revision.
sammccall added reviewers: kadircet, usaxena95.
Herald added subscribers: llvm-commits, cfe-commits, jfb, arphaman, jkorous, 
MaskRay, ilya-biryukov.
Herald added projects: clang, LLVM.
sammccall updated this revision to Diff 239652.
sammccall added a comment.
sammccall updated this revision to Diff 239653.

Revert changes to VSCode client. This experimental version of the VSCode libs
is fairly new and some corp mirrors we care about are behind ;-)

also clang-format


sammccall added a comment.

revert accidental change


This patch simply shows the completed/total items on the background queue, e.g.
 indexing: 233/1000
The denominator is reset to zero every time the queue goes idle.
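
For reference, the displayed numbers fall out of the queue counters; here is a
condensed sketch of the arithmetic the ClangdLSPServer change performs (the
helper name is made up for illustration):

  // BackgroundQueue::Stats carries cumulative counters; LastIdle records the
  // Completed count at the last time the queue drained, so both numerator and
  // denominator restart from zero for each new batch of work.
  static std::string renderProgress(const BackgroundQueue::Stats &S) {
    unsigned Done = S.Completed - S.LastIdle;    // e.g. 233
    unsigned Total = S.Enqueued - S.LastIdle;    // e.g. 1000
    // Sent as a WorkDoneProgressReport: message "233/1000", percentage 23.3.
    return llvm::formatv("{0}/{1}", Done, Total);
  }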

The protocol is fairly complicated here (it requires creating a remote
"progress" resource via a window/workDoneProgress/create request before sending
updates). We implement the full protocol, but I've added an extension that
allows the creation step to be skipped, to reduce the burden on clients; in
particular, the lit test takes this shortcut.
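
To make the shape of the exchange concrete, here is a rough sketch using the
types this patch adds to Protocol.h (the function is illustrative only, and the
comments show roughly what toJSON produces for each payload):

  // Full protocol: a window/workDoneProgress/create request allocates the
  // "backgroundIndexProgress" token, then $/progress notifications stream
  // against it. With the implicitWorkDoneProgressCreate capability (the clangd
  // extension), the create request is skipped entirely.
  void sketchProgressStream() {
    WorkDoneProgressBegin Begin;
    Begin.title = "indexing";
    Begin.percentage = true;   // {"kind":"begin","title":"indexing","percentage":0}

    WorkDoneProgressReport Report;
    Report.message = "233/1000";
    Report.percentage = 23.3;  // {"kind":"report","message":"233/1000","percentage":23.3}

    WorkDoneProgressEnd End;   // {"kind":"end"}, sent once the queue goes idle

    // Each value is wrapped in ProgressParams<T> (token + value) and delivered
    // as a $/progress notification.
  }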

The addition of background index progress to DiagnosticConsumer seems ridiculous
at first glance, but I believe that interface is trending in the direction of
"ClangdServer callbacks" anyway. It's due for a rename, but otherwise actually
fits.
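
As a quick illustration that the hook is usable outside ClangdLSPServer, here is
a minimal sketch of an alternative consumer; the subclass name is invented, and
the base interface is spelled as in this description:

  // Hypothetical embedder that just logs the counters instead of speaking LSP.
  class LoggingConsumer : public DiagnosticConsumer {
    void onBackgroundIndexProgress(const BackgroundQueue::Stats &Stats) override {
      vlog("background index: {0}/{1} done, {2} active",
           Stats.Completed - Stats.LastIdle, Stats.Enqueued - Stats.LastIdle,
           Stats.Active);
    }
  };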


Repository:
  rG LLVM Github Monorepo

https://reviews.llvm.org/D73218

Files:
  clang-tools-extra/clangd/ClangdLSPServer.cpp
  clang-tools-extra/clangd/ClangdLSPServer.h
  clang-tools-extra/clangd/ClangdServer.cpp
  clang-tools-extra/clangd/ClangdServer.h
  clang-tools-extra/clangd/Protocol.cpp
  clang-tools-extra/clangd/Protocol.h
  clang-tools-extra/clangd/index/Background.cpp
  clang-tools-extra/clangd/index/Background.h
  clang-tools-extra/clangd/index/BackgroundQueue.cpp
  clang-tools-extra/clangd/test/Inputs/background-index/definition.jsonrpc
  clang-tools-extra/clangd/test/background-index.test
  clang-tools-extra/clangd/unittests/BackgroundIndexTests.cpp
  llvm/include/llvm/Support/JSON.h

Index: llvm/include/llvm/Support/JSON.h
===================================================================
--- llvm/include/llvm/Support/JSON.h
+++ llvm/include/llvm/Support/JSON.h
@@ -598,6 +598,13 @@
   }
   return false;
 }
+inline bool fromJSON(const Value &E, std::nullptr_t &Out) {
+  if (auto S = E.getAsNull()) {
+    Out = *S;
+    return true;
+  }
+  return false;
+}
 template <typename T> bool fromJSON(const Value &E, llvm::Optional<T> &Out) {
   if (E.getAsNull()) {
     Out = llvm::None;
Index: clang-tools-extra/clangd/unittests/BackgroundIndexTests.cpp
===================================================================
--- clang-tools-extra/clangd/unittests/BackgroundIndexTests.cpp
+++ clang-tools-extra/clangd/unittests/BackgroundIndexTests.cpp
@@ -711,5 +711,53 @@
   }
 }
 
+TEST(BackgroundQueueTest, Progress) {
+  using testing::AnyOf;
+  BackgroundQueue::Stats S;
+  BackgroundQueue Q([&](BackgroundQueue::Stats New) {
+    // Verify values are sane.
+    // Items are enqueued one at a time (at least in this test).
+    EXPECT_THAT(New.Enqueued, AnyOf(S.Enqueued, S.Enqueued + 1));
+    // Items are completed one at a time.
+    EXPECT_THAT(New.Completed, AnyOf(S.Completed, S.Completed + 1));
+    // Items are started or completed one at a time.
+    EXPECT_THAT(New.Active, AnyOf(S.Active - 1, S.Active, S.Active + 1));
+    // Idle point only advances in time.
+    EXPECT_GE(New.LastIdle, S.LastIdle);
+    // Idle point is a task that has been completed in the past.
+    EXPECT_LE(New.LastIdle, New.Completed);
+    // LastIdle is current (equals Enqueued) only if we're really idle.
+    EXPECT_EQ(New.LastIdle == New.Enqueued,
+              New.Completed == New.Enqueued && New.Active == 0u);
+    S = New;
+  });
+
+  // Two types of tasks: a ping task enqueues a pong task.
+  // This avoids all enqueues followed by all completions (boring!)
+  std::atomic<int> PingCount(0), PongCount(0);
+  BackgroundQueue::Task Pong([&] { ++PongCount; });
+  BackgroundQueue::Task Ping([&] {
+    ++PingCount;
+    Q.push(Pong);
+  });
+
+  for (int I = 0; I < 1000; ++I)
+    Q.push(Ping);
+  // Spin up some workers and stop while idle.
+  AsyncTaskRunner ThreadPool;
+  for (unsigned I = 0; I < 5; ++I)
+    ThreadPool.runAsync("worker", [&] { Q.work([&] { Q.stop(); }); });
+  ThreadPool.wait();
+
+  // Everything's done, check final stats.
+  // Assertions above ensure we got from 0 to 2000 in a reasonable way.
+  EXPECT_EQ(PingCount.load(), 1000);
+  EXPECT_EQ(PongCount.load(), 1000);
+  EXPECT_EQ(S.Active, 0u);
+  EXPECT_EQ(S.Enqueued, 2000u);
+  EXPECT_EQ(S.Completed, 2000u);
+  EXPECT_EQ(S.LastIdle, 2000u);
+}
+
 } // namespace clangd
 } // namespace clang
Index: clang-tools-extra/clangd/test/background-index.test
===================================================================
--- clang-tools-extra/clangd/test/background-index.test
+++ clang-tools-extra/clangd/test/background-index.test
@@ -11,7 +11,8 @@
 # We're editing bar.cpp, which includes foo.h.
 # foo() is declared in foo.h and defined in foo.cpp.
 # The background index should allow us to go-to-definition on foo().
-# RUN: clangd -background-index -lit-test < %t/definition.jsonrpc | FileCheck %t/definition.jsonrpc
+# We should also see indexing progress notifications.
+# RUN: clangd -background-index -lit-test < %t/definition.jsonrpc | FileCheck %t/definition.jsonrpc --check-prefixes=CHECK,BUILD
 
 # Test that the index is writing files in the expected location.
 # RUN: ls %t/.clangd/index/foo.cpp.*.idx
@@ -19,4 +20,4 @@
 
 # Test the index is read from disk: delete code and restart clangd.
 # RUN: rm %t/foo.cpp
-# RUN: clangd -background-index -lit-test < %t/definition.jsonrpc | FileCheck %t/definition.jsonrpc
+# RUN: clangd -background-index -lit-test < %t/definition.jsonrpc | FileCheck %t/definition.jsonrpc --check-prefixes=CHECK,USE
Index: clang-tools-extra/clangd/test/Inputs/background-index/definition.jsonrpc
===================================================================
--- clang-tools-extra/clangd/test/Inputs/background-index/definition.jsonrpc
+++ clang-tools-extra/clangd/test/Inputs/background-index/definition.jsonrpc
@@ -5,7 +5,7 @@
   "params": {
     "processId": 123,
     "rootPath": "clangd",
-    "capabilities": {},
+    "capabilities": { "window": { "workDoneProgress": true, "implicitWorkDoneProgressCreate": true} },
     "trace": "off"
   }
 }
@@ -22,6 +22,31 @@
     }
   }
 }
+# BUILD:      "method": "$/progress"
+# BUILD-NEXT: "params": {
+# BUILD-NEXT:   "token": "backgroundIndexProgress",
+# BUILD-NEXT:   "value": {
+# BUILD-NEXT:     "kind": "begin",
+# BUILD-NEXT:     "percentage": 0,
+# BUILD-NEXT:     "title": "indexing"
+# BUILD-NEXT:   }
+# BUILD-NEXT: }
+# BUILD:      "method": "$/progress"
+# BUILD-NEXT: "params": {
+# BUILD-NEXT:   "token": "backgroundIndexProgress",
+# BUILD-NEXT:   "value": {
+# BUILD-NEXT:     "kind": "report",
+# BUILD-NEXT:     "message": "0/1",
+# BUILD-NEXT:     "percentage": 0
+# BUILD-NEXT:   }
+# BUILD-NEXT: }
+#    later...
+# BUILD:          "message": "1/2",
+# BUILD-NEXT:     "percentage": 50
+#    finally...
+# BUILD:          "kind": "end"
+# when using the index, we spawn one task to read it, but not more.
+# USE-NOT:        "message": "1/2",
 ---
 {
   "jsonrpc": "2.0",
Index: clang-tools-extra/clangd/index/BackgroundQueue.cpp
===================================================================
--- clang-tools-extra/clangd/index/BackgroundQueue.cpp
+++ clang-tools-extra/clangd/index/BackgroundQueue.cpp
@@ -6,6 +6,7 @@
 //
 //===----------------------------------------------------------------------===//
 
+#include "Logger.h"
 #include "index/Background.h"
 
 namespace clang {
@@ -28,10 +29,11 @@
         CV.notify_all();
         return;
       }
-      ++NumActiveTasks;
+      ++Stat.Active;
       std::pop_heap(Queue.begin(), Queue.end());
       Task = std::move(Queue.back());
       Queue.pop_back();
+      notifyProgress();
     }
 
     if (Task->ThreadPri != llvm::ThreadPriority::Default &&
@@ -43,14 +45,20 @@
 
     {
       std::unique_lock<std::mutex> Lock(Mu);
-      if (NumActiveTasks == 1 && Queue.empty() && OnIdle) {
+      ++Stat.Completed;
+      if (Stat.Active == 1 && Queue.empty()) {
         // We just finished the last item, the queue is going idle.
-        Lock.unlock();
-        OnIdle();
-        Lock.lock();
+        assert(ShouldStop || Stat.Completed == Stat.Enqueued);
+        Stat.LastIdle = Stat.Completed;
+        if (OnIdle) {
+          Lock.unlock();
+          OnIdle();
+          Lock.lock();
+        }
       }
-      assert(NumActiveTasks > 0 && "before decrementing");
-      --NumActiveTasks;
+      assert(Stat.Active > 0 && "before decrementing");
+      --Stat.Active;
+      notifyProgress();
     }
     CV.notify_all();
   }
@@ -70,6 +78,8 @@
     T.QueuePri = std::max(T.QueuePri, Boosts.lookup(T.Tag));
     Queue.push_back(std::move(T));
     std::push_heap(Queue.begin(), Queue.end());
+    ++Stat.Enqueued;
+    notifyProgress();
   }
   CV.notify_all();
 }
@@ -81,6 +91,8 @@
       T.QueuePri = std::max(T.QueuePri, Boosts.lookup(T.Tag));
     std::move(Tasks.begin(), Tasks.end(), std::back_inserter(Queue));
     std::make_heap(Queue.begin(), Queue.end());
+    Stat.Enqueued += Tasks.size();
+    notifyProgress();
   }
   CV.notify_all();
 }
@@ -108,7 +120,14 @@
     llvm::Optional<double> TimeoutSeconds) {
   std::unique_lock<std::mutex> Lock(Mu);
   return wait(Lock, CV, timeoutSeconds(TimeoutSeconds),
-              [&] { return Queue.empty() && NumActiveTasks == 0; });
+              [&] { return Queue.empty() && Stat.Active == 0; });
+}
+
+void BackgroundQueue::notifyProgress() const {
+  dlog("Queue: {0}/{1} ({2} active). Last idle at {3}", Stat.Completed,
+       Stat.Enqueued, Stat.Active, Stat.LastIdle);
+  if (OnProgress)
+    OnProgress(Stat);
 }
 
 } // namespace clangd
Index: clang-tools-extra/clangd/index/Background.h
===================================================================
--- clang-tools-extra/clangd/index/Background.h
+++ clang-tools-extra/clangd/index/Background.h
@@ -78,6 +78,17 @@
     bool operator<(const Task &O) const { return QueuePri < O.QueuePri; }
   };
 
+  // Describes the number of tasks processed by the queue.
+  struct Stats {
+    unsigned Enqueued = 0;  // Total number of tasks ever enqueued.
+    unsigned Active = 0;    // Tasks being currently processed by a worker.
+    unsigned Completed = 0; // Tasks that have been finished.
+    unsigned LastIdle = 0;  // Number of completed tasks when last empty.
+  };
+
+  BackgroundQueue(std::function<void(Stats)> OnProgress = nullptr)
+      : OnProgress(OnProgress) {}
+
   // Add tasks to the queue.
   void push(Task);
   void append(std::vector<Task>);
@@ -100,12 +111,15 @@
   blockUntilIdleForTest(llvm::Optional<double> TimeoutSeconds);
 
 private:
+  void notifyProgress() const; // Requires lock Mu
+
   std::mutex Mu;
-  unsigned NumActiveTasks = 0; // Only idle when queue is empty *and* no tasks.
+  Stats Stat;
   std::condition_variable CV;
   bool ShouldStop = false;
   std::vector<Task> Queue; // max-heap
   llvm::StringMap<unsigned> Boosts;
+  std::function<void(Stats)> OnProgress;
 };
 
 // Builds an in-memory index by running the static indexer action over
@@ -121,7 +135,8 @@
       Context BackgroundContext, const FileSystemProvider &,
       const GlobalCompilationDatabase &CDB,
       BackgroundIndexStorage::Factory IndexStorageFactory,
-      size_t ThreadPoolSize = llvm::heavyweight_hardware_concurrency());
+      size_t ThreadPoolSize = llvm::heavyweight_hardware_concurrency(),
+      std::function<void(BackgroundQueue::Stats)> OnProgress = nullptr);
   ~BackgroundIndex(); // Blocks while the current task finishes.
 
   // Enqueue translation units for indexing.
Index: clang-tools-extra/clangd/index/Background.cpp
===================================================================
--- clang-tools-extra/clangd/index/Background.cpp
+++ clang-tools-extra/clangd/index/Background.cpp
@@ -137,11 +137,13 @@
 BackgroundIndex::BackgroundIndex(
     Context BackgroundContext, const FileSystemProvider &FSProvider,
     const GlobalCompilationDatabase &CDB,
-    BackgroundIndexStorage::Factory IndexStorageFactory, size_t ThreadPoolSize)
-    : SwapIndex(std::make_unique<MemIndex>()), FSProvider(FSProvider),
-      CDB(CDB), BackgroundContext(std::move(BackgroundContext)),
+    BackgroundIndexStorage::Factory IndexStorageFactory, size_t ThreadPoolSize,
+    std::function<void(BackgroundQueue::Stats)> OnProgress)
+    : SwapIndex(std::make_unique<MemIndex>()), FSProvider(FSProvider), CDB(CDB),
+      BackgroundContext(std::move(BackgroundContext)),
       Rebuilder(this, &IndexedSymbols, ThreadPoolSize),
       IndexStorageFactory(std::move(IndexStorageFactory)),
+      Queue(std::move(OnProgress)),
       CommandsChanged(
           CDB.watch([&](const std::vector<std::string> &ChangedFiles) {
             enqueue(ChangedFiles);
Index: clang-tools-extra/clangd/Protocol.h
===================================================================
--- clang-tools-extra/clangd/Protocol.h
+++ clang-tools-extra/clangd/Protocol.h
@@ -427,6 +427,16 @@
   /// The client supports testing for validity of rename operations
   /// before execution.
   bool RenamePrepareSupport = false;
+
+  /// The client supports progress notifications.
+  /// window.workDoneProgress
+  bool WorkDoneProgress = false;
+
+  /// The client supports implicit $/progress work-done progress streams,
+  /// without a preceding window/workDoneProgress/create.
+  /// This is a clangd extension.
+  /// window.implicitWorkDoneProgressCreate
+  bool ImplicitProgressCreation = false;
 };
 bool fromJSON(const llvm::json::Value &, ClientCapabilities &);
 
@@ -499,6 +509,89 @@
 };
 bool fromJSON(const llvm::json::Value &, InitializeParams &);
 
+struct WorkDoneProgressCreateParams {
+  /// The token to be used to report progress.
+  llvm::json::Value token = nullptr;
+};
+llvm::json::Value toJSON(const WorkDoneProgressCreateParams &P);
+
+template <typename T> struct ProgressParams {
+  /// The progress token provided by the client or server.
+  llvm::json::Value token = nullptr;
+
+  /// The progress data.
+  T value;
+};
+template <typename T> llvm::json::Value toJSON(const ProgressParams<T> &P) {
+  return llvm::json::Object{{"token", P.token}, {"value", P.value}};
+}
+/// To start progress reporting, a $/progress notification with the following
+/// payload must be sent.
+struct WorkDoneProgressBegin {
+  /// Mandatory title of the progress operation. Used to briefly inform about
+  /// the kind of operation being performed.
+  ///
+  /// Examples: "Indexing" or "Linking dependencies".
+  std::string title;
+
+  /// Controls if a cancel button should show to allow the user to cancel the
+  /// long-running operation. Clients that don't support cancellation are
+  /// allowed to ignore the setting.
+  bool cancellable = false;
+
+  /// Optional progress percentage to display (value 100 is considered 100%).
+  /// If not provided, infinite progress is assumed and clients are allowed
+  /// to ignore the `percentage` value in subsequent report notifications.
+  ///
+  /// The value should be steadily rising. Clients are free to ignore values
+  /// that are not following this rule.
+  ///
+  /// Clangd implementation note: we only send nonzero percentages in
+  /// the WorkDoneProgressReport. 'true' here means percentages will be used.
+  bool percentage = false;
+};
+llvm::json::Value toJSON(const WorkDoneProgressBegin &);
+
+/// Reporting progress is done using the following payload.
+struct WorkDoneProgressReport {
+  /// Mandatory title of the progress operation. Used to briefly inform about
+  /// the kind of operation being performed.
+  ///
+  /// Examples: "Indexing" or "Linking dependencies".
+  std::string title;
+
+  /// Controls enablement state of a cancel button. This property is only valid
+  /// if a cancel button got requested in the `WorkDoneProgressBegin` payload.
+  ///
+  /// Clients that don't support cancellation or don't support controlling
+  /// the button's enablement state are allowed to ignore the setting.
+  llvm::Optional<bool> cancellable;
+
+  /// Optional, more detailed associated progress message. Contains
+  /// complementary information to the `title`.
+  ///
+  /// Examples: "3/25 files", "project/src/module2", "node_modules/some_dep".
+  /// If unset, the previous progress message (if any) is still valid.
+  llvm::Optional<std::string> message;
+
+  /// Optional progress percentage to display (value 100 is considered 100%).
+  /// If not provided, infinite progress is assumed and clients are allowed
+  /// to ignore the `percentage` value in subsequent report notifications.
+  ///
+  /// The value should be steadily rising. Clients are free to ignore values
+  /// that are not following this rule.
+  llvm::Optional<double> percentage;
+};
+llvm::json::Value toJSON(const WorkDoneProgressReport &);
+
+/// Signals the end of progress reporting.
+struct WorkDoneProgressEnd {
+  /// Optional final message indicating, for example, the outcome of the
+  /// operation.
+  llvm::Optional<std::string> message;
+};
+llvm::json::Value toJSON(const WorkDoneProgressEnd &);
+
 enum class MessageType {
   /// An error message.
   Error = 1,
Index: clang-tools-extra/clangd/Protocol.cpp
===================================================================
--- clang-tools-extra/clangd/Protocol.cpp
+++ clang-tools-extra/clangd/Protocol.cpp
@@ -347,6 +347,12 @@
       }
     }
   }
+  if (auto *Window = O->getObject("window")) {
+    if (auto WorkDoneProgress = Window->getBoolean("workDoneProgress"))
+      R.WorkDoneProgress = *WorkDoneProgress;
+    if (auto Implicit = Window->getBoolean("implicitWorkDoneProgressCreate"))
+      R.ImplicitProgressCreation = *Implicit;
+  }
   if (auto *OffsetEncoding = O->get("offsetEncoding")) {
     R.offsetEncoding.emplace();
     if (!fromJSON(*OffsetEncoding, *R.offsetEncoding))
@@ -370,6 +376,40 @@
   return true;
 }
 
+llvm::json::Value toJSON(const WorkDoneProgressCreateParams &P) {
+  return llvm::json::Object{{"token", P.token}};
+}
+
+llvm::json::Value toJSON(const WorkDoneProgressBegin &P) {
+  llvm::json::Object Result{
+      {"kind", "begin"},
+      {"title", P.title},
+  };
+  if (P.cancellable)
+    Result["cancellable"] = true;
+  if (P.percentage)
+    Result["percentage"] = 0;
+  return Result;
+}
+
+llvm::json::Value toJSON(const WorkDoneProgressReport &P) {
+  llvm::json::Object Result{{"kind", "report"}};
+  if (P.cancellable)
+    Result["cancellable"] = *P.cancellable;
+  if (P.message)
+    Result["message"] = *P.message;
+  if (P.percentage)
+    Result["percentage"] = *P.percentage;
+  return Result;
+}
+
+llvm::json::Value toJSON(const WorkDoneProgressEnd &P) {
+  llvm::json::Object Result{{"kind", "end"}};
+  if (P.message)
+    Result["message"] = *P.message;
+  return Result;
+}
+
 llvm::json::Value toJSON(const MessageType &R) {
   return static_cast<int64_t>(R);
 }
Index: clang-tools-extra/clangd/ClangdServer.h
===================================================================
--- clang-tools-extra/clangd/ClangdServer.h
+++ clang-tools-extra/clangd/ClangdServer.h
@@ -56,6 +56,9 @@
   virtual void
   onHighlightingsReady(PathRef File,
                        std::vector<HighlightingToken> Highlightings) {}
+
+  // Called when background indexing tasks are enqueued, started, or completed.
+  virtual void onBackgroundIndexProgress(const BackgroundQueue::Stats &Stats) {}
 };
 
 /// When set, used by ClangdServer to get clang-tidy options for each particular
Index: clang-tools-extra/clangd/ClangdServer.cpp
===================================================================
--- clang-tools-extra/clangd/ClangdServer.cpp
+++ clang-tools-extra/clangd/ClangdServer.cpp
@@ -147,7 +147,10 @@
         Context::current().clone(), FSProvider, CDB,
         BackgroundIndexStorage::createDiskBackedStorageFactory(
             [&CDB](llvm::StringRef File) { return CDB.getProjectInfo(File); }),
-        std::max(Opts.AsyncThreadsCount, 1u));
+        std::max(Opts.AsyncThreadsCount, 1u),
+        [&DiagConsumer](BackgroundQueue::Stats S) {
+          DiagConsumer.onBackgroundIndexProgress(S);
+        });
     AddIndex(BackgroundIdx.get());
   }
   if (DynamicIdx)
Index: clang-tools-extra/clangd/ClangdLSPServer.h
===================================================================
--- clang-tools-extra/clangd/ClangdLSPServer.h
+++ clang-tools-extra/clangd/ClangdLSPServer.h
@@ -60,6 +60,7 @@
   void
   onHighlightingsReady(PathRef File,
                        std::vector<HighlightingToken> Highlightings) override;
+  void onBackgroundIndexProgress(const BackgroundQueue::Stats &Stats) override;
 
   // LSP methods. Notifications have signature void(const Params&).
   // Calls have signature void(const Params&, Callback<Response>).
@@ -185,6 +186,12 @@
   void callRaw(StringRef Method, llvm::json::Value Params,
                Callback<llvm::json::Value> CB);
   void notify(StringRef Method, llvm::json::Value Params);
+  template <typename T> void progress(const llvm::json::Value &Token, T Value) {
+    ProgressParams<T> Params;
+    Params.token = Token;
+    Params.value = std::move(Value);
+    notify("$/progress", Params);
+  }
 
   const FileSystemProvider &FSProvider;
   /// Options used for code completion
@@ -205,6 +212,24 @@
   MarkupKind HoverContentFormat = MarkupKind::PlainText;
   /// Whether the client supports offsets for parameter info labels.
   bool SupportsOffsetsInSignatureHelp = false;
+  std::mutex BackgroundIndexProgressMutex;
+  enum class BackgroundIndexProgress {
+    // Client doesn't support reporting progress. No transitions possible.
+    Unsupported,
+    // The queue is idle, and the client has no progress bar.
+    // Can transition to Creating when we have some activity.
+    Empty,
+    // We've requested the client to create a progress bar.
+  // Meanwhile, the state is buffered in PendingBackgroundIndexProgress.
+    Creating,
+    // The client has a progress bar, and we can send it updates immediately.
+    Live,
+  } BackgroundIndexProgressState = BackgroundIndexProgress::Unsupported;
+  // The progress to send when the progress bar is created.
+  // Only valid in state Creating.
+  BackgroundQueue::Stats PendingBackgroundIndexProgress;
+  /// LSP extension: skip WorkDoneProgressCreate, just send progress streams.
+  bool BackgroundIndexSkipCreate = false;
   // Store of the current versions of the open documents.
   DraftStore DraftMgr;
 
Index: clang-tools-extra/clangd/ClangdLSPServer.cpp
===================================================================
--- clang-tools-extra/clangd/ClangdLSPServer.cpp
+++ clang-tools-extra/clangd/ClangdLSPServer.cpp
@@ -33,6 +33,7 @@
 #include "llvm/Support/ScopedPrinter.h"
 #include <cstddef>
 #include <memory>
+#include <mutex>
 #include <string>
 #include <vector>
 
@@ -522,6 +523,9 @@
   SupportFileStatus = Params.initializationOptions.FileStatus;
   HoverContentFormat = Params.capabilities.HoverContentFormat;
   SupportsOffsetsInSignatureHelp = Params.capabilities.OffsetsInSignatureHelp;
+  if (Params.capabilities.WorkDoneProgress)
+    BackgroundIndexProgressState = BackgroundIndexProgress::Empty;
+  BackgroundIndexSkipCreate = Params.capabilities.ImplicitProgressCreation;
 
   // Per LSP, renameProvider can be either boolean or RenameOptions.
   // RenameOptions will be specified if the client states it supports prepare.
@@ -1378,6 +1382,74 @@
   publishDiagnostics(URI, std::move(LSPDiagnostics));
 }
 
+void ClangdLSPServer::onBackgroundIndexProgress(
+    const BackgroundQueue::Stats &Stats) {
+  static const char ProgressToken[] = "backgroundIndexProgress";
+  std::lock_guard<std::mutex> Lock(BackgroundIndexProgressMutex);
+
+  auto NotifyProgress = [this](const BackgroundQueue::Stats &Stats) {
+    if (BackgroundIndexProgressState != BackgroundIndexProgress::Live) {
+      WorkDoneProgressBegin Begin;
+      Begin.percentage = true;
+      Begin.title = "indexing";
+      progress(ProgressToken, std::move(Begin));
+      BackgroundIndexProgressState = BackgroundIndexProgress::Live;
+    }
+
+    if (Stats.Completed < Stats.Enqueued) {
+      assert(Stats.Enqueued > Stats.LastIdle);
+      WorkDoneProgressReport Report;
+      Report.percentage = 100.0 * (Stats.Completed - Stats.LastIdle) /
+                          (Stats.Enqueued - Stats.LastIdle);
+      Report.message =
+          llvm::formatv("{0}/{1}", Stats.Completed - Stats.LastIdle,
+                        Stats.Enqueued - Stats.LastIdle);
+      progress(ProgressToken, std::move(Report));
+    } else {
+      assert(Stats.Completed == Stats.Enqueued);
+      progress(ProgressToken, WorkDoneProgressEnd());
+      BackgroundIndexProgressState = BackgroundIndexProgress::Empty;
+    }
+  };
+
+  switch (BackgroundIndexProgressState) {
+  case BackgroundIndexProgress::Unsupported:
+    return;
+  case BackgroundIndexProgress::Creating:
+    // Cache this update for when the progress bar is available.
+    PendingBackgroundIndexProgress = Stats;
+    return;
+  case BackgroundIndexProgress::Empty: {
+    if (BackgroundIndexSkipCreate) {
+      NotifyProgress(Stats);
+      break;
+    }
+    // Cache this update for when the progress bar is available.
+    PendingBackgroundIndexProgress = Stats;
+    BackgroundIndexProgressState = BackgroundIndexProgress::Creating;
+    WorkDoneProgressCreateParams CreateRequest;
+    CreateRequest.token = ProgressToken;
+    call<std::nullptr_t>(
+        "window/workDoneProgress/create", CreateRequest,
+        [this, NotifyProgress](llvm::Expected<std::nullptr_t> E) {
+          std::lock_guard<std::mutex> Lock(BackgroundIndexProgressMutex);
+          if (E) {
+            NotifyProgress(this->PendingBackgroundIndexProgress);
+          } else {
+            elog("Failed to create background index progress bar: {0}",
+                 E.takeError());
+            // give up forever rather than thrashing about
+            BackgroundIndexProgressState = BackgroundIndexProgress::Unsupported;
+          }
+        });
+    break;
+  }
+  case BackgroundIndexProgress::Live:
+    NotifyProgress(Stats);
+    break;
+  }
+}
+
 void ClangdLSPServer::onFileUpdated(PathRef File, const TUStatus &Status) {
   if (!SupportFileStatus)
     return;