TheNeuralBit commented on issue #23599:
URL: https://github.com/apache/beam/issues/23599#issuecomment-1278264933

   Ah, actually the ipykernel version conflict is non-blocking — apache-beam is 
still installed (we should still address this, though, since it's disconcerting; 
thanks for #23599).
   
   The other issue, with protobuf, is a hard blocker though. Here is the full 
stacktrace:
   ```
   
[/usr/local/lib/python3.7/dist-packages/apache_beam/runners/interactive/interactive_beam.py](https://localhost:8080/#)
 in <module>
        49 from apache_beam.options.pipeline_options import FlinkRunnerOptions
        50 from apache_beam.runners.interactive import interactive_environment 
as ie
   ---> 51 from 
apache_beam.runners.interactive.dataproc.dataproc_cluster_manager import 
DataprocClusterManager
        52 from apache_beam.runners.interactive.dataproc.types import 
ClusterIdentifier
        53 from apache_beam.runners.interactive.dataproc.types import 
ClusterMetadata
   
   
[/usr/local/lib/python3.7/dist-packages/apache_beam/runners/interactive/dataproc/dataproc_cluster_manager.py](https://localhost:8080/#)
 in <module>
        30 
        31 try:
   ---> 32   from google.cloud import dataproc_v1
        33   from apache_beam.io.gcp import gcsfilesystem  #pylint: 
disable=ungrouped-imports
        34 except ImportError:
   
   
[/usr/local/lib/python3.7/dist-packages/google/cloud/dataproc_v1/__init__.py](https://localhost:8080/#)
 in <module>
        15 #
        16 
   ---> 17 from .services.autoscaling_policy_service import 
AutoscalingPolicyServiceClient
        18 from .services.autoscaling_policy_service import 
AutoscalingPolicyServiceAsyncClient
        19 from .services.batch_controller import BatchControllerClient
   
   
[/usr/local/lib/python3.7/dist-packages/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py](https://localhost:8080/#)
 in <module>
        14 # limitations under the License.
        15 #
   ---> 16 from .client import AutoscalingPolicyServiceClient
        17 from .async_client import AutoscalingPolicyServiceAsyncClient
        18 
   
   
[/usr/local/lib/python3.7/dist-packages/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py](https://localhost:8080/#)
 in <module>
        35 from google.cloud.dataproc_v1.services.autoscaling_policy_service 
import pagers
        36 from google.cloud.dataproc_v1.types import autoscaling_policies
   ---> 37 from .transports.base import AutoscalingPolicyServiceTransport, 
DEFAULT_CLIENT_INFO
        38 from .transports.grpc import AutoscalingPolicyServiceGrpcTransport
        39 from .transports.grpc_asyncio import 
AutoscalingPolicyServiceGrpcAsyncIOTransport
   
   
[/usr/local/lib/python3.7/dist-packages/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py](https://localhost:8080/#)
 in <module>
        17 from typing import Dict, Type
        18 
   ---> 19 from .base import AutoscalingPolicyServiceTransport
        20 from .grpc import AutoscalingPolicyServiceGrpcTransport
        21 from .grpc_asyncio import 
AutoscalingPolicyServiceGrpcAsyncIOTransport
   
   
[/usr/local/lib/python3.7/dist-packages/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py](https://localhost:8080/#)
 in <module>
        31 try:
        32     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
   ---> 33         
gapic_version=pkg_resources.get_distribution("google-cloud-dataproc",).version,
        34     )
        35 except pkg_resources.DistributionNotFound:
   
   
[/usr/local/lib/python3.7/dist-packages/pkg_resources/__init__.py](https://localhost:8080/#)
 in get_distribution(dist)
       464         dist = Requirement.parse(dist)
       465     if isinstance(dist, Requirement):
   --> 466         dist = get_provider(dist)
       467     if not isinstance(dist, Distribution):
       468         raise TypeError("Expected string, Requirement, or 
Distribution", dist)
   
   
[/usr/local/lib/python3.7/dist-packages/pkg_resources/__init__.py](https://localhost:8080/#)
 in get_provider(moduleOrReq)
       340     """Return an IResourceProvider for the named module or 
requirement"""
       341     if isinstance(moduleOrReq, Requirement):
   --> 342         return working_set.find(moduleOrReq) or 
require(str(moduleOrReq))[0]
       343     try:
       344         module = sys.modules[moduleOrReq]
   
   
[/usr/local/lib/python3.7/dist-packages/pkg_resources/__init__.py](https://localhost:8080/#)
 in require(self, *requirements)
       884         included, even if they were already activated in this 
working set.
       885         """
   --> 886         needed = self.resolve(parse_requirements(requirements))
       887 
       888         for dist in needed:
   
   
[/usr/local/lib/python3.7/dist-packages/pkg_resources/__init__.py](https://localhost:8080/#)
 in resolve(self, requirements, env, installer, replace_conflicting, extras)
       775                 # Oops, the "best" so far conflicts with a dependency
       776                 dependent_req = required_by[req]
   --> 777                 raise VersionConflict(dist, 
req).with_context(dependent_req)
       778 
       779             # push the new requirements onto the stack
   
   ContextualVersionConflict: (protobuf 3.17.3 
(/usr/local/lib/python3.7/dist-packages), 
Requirement.parse('protobuf<5.0.0dev,>=3.19.0'), {'proto-plus'})
   ```
   
   It looks like this is happening when importing `DataprocClusterManager`.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to