[grpc-io] Re: SEGFAULT in greeter_client

2019-02-13 Thread 'Srini Polavarapu' via grpc.io
Bump the priority of the Github issue to P1. Let's track it over there.

On Wednesday, February 13, 2019 at 8:36:09 PM UTC-8, Gautham B A wrote:
>
> I had filed it long back - https://github.com/grpc/grpc/issues/17807 .
>
> On Thursday, 14 February 2019 00:06:05 UTC+5:30, Carl Mastrangelo wrote:
>>
>> Hi, can you file an issue on gRPC's GitHub issue tracker?  
>> https://github.com/grpc/grpc/issues/new
>>
>> On Thursday, January 24, 2019 at 7:37:05 AM UTC-8, Gautham B A wrote:
>>>
>>> Hi all,
>>>
>>> I just cloned and built gRPC 
>>> (SHA 9ed8734efb9b1b2cd892942c2c6dd57e903ce719). I'm getting SEGFAULT when I 
>>> try to run greeter_client in C++. It SEGFAULTs when the RPC call is made -
>>>
>>> Status status = stub_->SayHello(&context, request, &reply);
>>>
>>> Here's how I'm building greeter_client -
>>>
>>> cmake_minimum_required(VERSION 3.13)
>>> project(HelloWorld)
>>>
>>> set(CMAKE_CXX_STANDARD 17)
>>>
>>> set(GRPC_BUILD_DIR
>>> /Users/gautham/projects/github/grpc)
>>>
>>> set(LIB_GRPC
>>> ${GRPC_BUILD_DIR}/libs/opt/libgpr.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libaddress_sorting.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_cronet.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_error_details.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_reflection.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_unsecure.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_cronet.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_unsecure.dylib
>>> ${GRPC_BUILD_DIR}/libs/opt/libgrpcpp_channelz.dylib
>>> )
>>>
>>> set(LIB_PROTOBUF
>>> 
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.17.dylib
>>> 
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.dylib
>>> 
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.17.dylib
>>> 
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.dylib
>>> 
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.17.dylib
>>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.dylib
>>> )
>>>
>>> include_directories(
>>> ${GRPC_BUILD_DIR}/include
>>> )
>>>
>>> add_executable(greeter_client
>>> greeter_client.cc
>>> helloworld.grpc.pb.cc
>>> helloworld.pb.cc
>>> )
>>>
>>> target_link_libraries(greeter_client
>>> ${LIB_GRPC}
>>> ${LIB_PROTOBUF}
>>> )
>>>
>>> Here's the coredump -
>>> * thread #1, stop reason = signal SIGSTOP
>>>   * frame #0: 0x7fffa253a19e libsystem_kernel.dylib`poll + 10
>>> frame #1: 0x00010e6c01a6 
>>> libgrpc.dylib`pollset_work(pollset=, 
>>> worker_hdl=0x7fff519dded8, deadline=) at 
>>> ev_poll_posix.cc:1063 [opt]
>>> frame #2: 0x00010e6e5999 
>>> libgrpc.dylib`cq_pluck(cq=0x7fad6240ae40, tag=0x7fff519de200, 
>>> deadline=, reserved=) at completion_queue.cc:1282 
>>> [opt]
>>> frame #3: 0x00010e22c4d1 
>>> greeter_client`grpc::CompletionQueue::Pluck(grpc::internal::CompletionQueueTag*)
>>>  
>>> + 161
>>> frame #4: 0x00010e22b810 
>>> greeter_client`grpc::internal::BlockingUnaryCallImpl>>  
>>> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
>>> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
>>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 704
>>> frame #5: 0x00010e22b4ed 
>>> greeter_client`grpc::internal::BlockingUnaryCallImpl>>  
>>> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
>>> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
>>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 61
>>> frame #6: 0x00010e228921 greeter_client`grpc::Status 
>>> grpc::internal::BlockingUnaryCall>> helloworld::HelloReply>(grpc::ChannelInterface*, grpc::internal::RpcMethod 
>>> const&, grpc::ClientContext*, helloworld::HelloRequest const&, 
>>> helloworld::HelloReply*) + 81
>>> frame #7: 0x00010e2288c5 
>>> greeter_client`helloworld::Greeter::Stub::SayHello(grpc::ClientContext*, 
>>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 85
>>> frame #8: 0x00010e226ecb 
>>> greeter_client`GreeterClient::SayHello(std::__1::basic_string>> std::__1::char_traits, std::__1::allocator > const&) + 235
>>> frame #9: 0x00010e226c05 greeter_client`main + 469
>>> frame #10: 0x7fffa240a235 libdyld.dylib`start + 1
>>> frame #11: 0x7fffa240a235 libdyld.dylib`start + 1
>>>
>>> I'm using macOS Sierra 10.12.6
>>>
>>> Compiler -
>>> clang
>>> Apple LLVM version 9.0.0 (clang-900.0.39.2)
>>> Target: x86_64-apple-darwin16.7.0
>>> Thread model: posix
>>>
>>> Can anyone please help me?
>>>
>>> Thanks,
>>> --Gautham
>>>
>>>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.

[grpc-io] Re: SEGFAULT in greeter_client

2019-02-13 Thread Gautham B A
I had filed it long back - https://github.com/grpc/grpc/issues/17807 .

On Thursday, 14 February 2019 00:06:05 UTC+5:30, Carl Mastrangelo wrote:
>
> Hi, can you file an issue on gRPC's GitHub issue tracker?  
> https://github.com/grpc/grpc/issues/new
>
> On Thursday, January 24, 2019 at 7:37:05 AM UTC-8, Gautham B A wrote:
>>
>> Hi all,
>>
>> I just cloned and built gRPC 
>> (SHA 9ed8734efb9b1b2cd892942c2c6dd57e903ce719). I'm getting SEGFAULT when I 
>> try to run greeter_client in C++. It SEGFAULTs when the RPC call is made -
>>
>> Status status = stub_->SayHello(&context, request, &reply);
>>
>> Here's how I'm building greeter_client -
>>
>> cmake_minimum_required(VERSION 3.13)
>> project(HelloWorld)
>>
>> set(CMAKE_CXX_STANDARD 17)
>>
>> set(GRPC_BUILD_DIR
>> /Users/gautham/projects/github/grpc)
>>
>> set(LIB_GRPC
>> ${GRPC_BUILD_DIR}/libs/opt/libgpr.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libaddress_sorting.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_cronet.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_error_details.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_reflection.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_unsecure.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_cronet.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_unsecure.dylib
>> ${GRPC_BUILD_DIR}/libs/opt/libgrpcpp_channelz.dylib
>> )
>>
>> set(LIB_PROTOBUF
>> 
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.17.dylib
>> 
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.dylib
>> 
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.17.dylib
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.dylib
>> 
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.17.dylib
>> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.dylib
>> )
>>
>> include_directories(
>> ${GRPC_BUILD_DIR}/include
>> )
>>
>> add_executable(greeter_client
>> greeter_client.cc
>> helloworld.grpc.pb.cc
>> helloworld.pb.cc
>> )
>>
>> target_link_libraries(greeter_client
>> ${LIB_GRPC}
>> ${LIB_PROTOBUF}
>> )
>>
>> Here's the coredump -
>> * thread #1, stop reason = signal SIGSTOP
>>   * frame #0: 0x7fffa253a19e libsystem_kernel.dylib`poll + 10
>> frame #1: 0x00010e6c01a6 
>> libgrpc.dylib`pollset_work(pollset=, 
>> worker_hdl=0x7fff519dded8, deadline=) at 
>> ev_poll_posix.cc:1063 [opt]
>> frame #2: 0x00010e6e5999 
>> libgrpc.dylib`cq_pluck(cq=0x7fad6240ae40, tag=0x7fff519de200, 
>> deadline=, reserved=) at completion_queue.cc:1282 
>> [opt]
>> frame #3: 0x00010e22c4d1 
>> greeter_client`grpc::CompletionQueue::Pluck(grpc::internal::CompletionQueueTag*)
>>  
>> + 161
>> frame #4: 0x00010e22b810 
>> greeter_client`grpc::internal::BlockingUnaryCallImpl>  
>> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
>> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 704
>> frame #5: 0x00010e22b4ed 
>> greeter_client`grpc::internal::BlockingUnaryCallImpl>  
>> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
>> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 61
>> frame #6: 0x00010e228921 greeter_client`grpc::Status 
>> grpc::internal::BlockingUnaryCall> helloworld::HelloReply>(grpc::ChannelInterface*, grpc::internal::RpcMethod 
>> const&, grpc::ClientContext*, helloworld::HelloRequest const&, 
>> helloworld::HelloReply*) + 81
>> frame #7: 0x00010e2288c5 
>> greeter_client`helloworld::Greeter::Stub::SayHello(grpc::ClientContext*, 
>> helloworld::HelloRequest const&, helloworld::HelloReply*) + 85
>> frame #8: 0x00010e226ecb 
>> greeter_client`GreeterClient::SayHello(std::__1::basic_string> std::__1::char_traits, std::__1::allocator > const&) + 235
>> frame #9: 0x00010e226c05 greeter_client`main + 469
>> frame #10: 0x7fffa240a235 libdyld.dylib`start + 1
>> frame #11: 0x7fffa240a235 libdyld.dylib`start + 1
>>
>> I'm using macOS Sierra 10.12.6
>>
>> Compiler -
>> clang
>> Apple LLVM version 9.0.0 (clang-900.0.39.2)
>> Target: x86_64-apple-darwin16.7.0
>> Thread model: posix
>>
>> Can anyone please help me?
>>
>> Thanks,
>> --Gautham
>>
>>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://

[grpc-io] Re: Is it possible to simultaneous accept JSON and protobuf payload?

2019-02-13 Thread 'Doug Fawley' via grpc.io
Similarly, you may also want to look at 
grpc-web: https://github.com/grpc/grpc-web.

Thanks,
Doug


On Monday, February 11, 2019 at 8:37:40 PM UTC-8, Danesh Kuruppu wrote:
>
> Hi Glen,
>
> Did you try grpc-gateway(https://github.com/grpc-ecosystem/grpc-gateway)? 
> This is not exactly what you need, but this generates reverse proxy server 
> which translates JSON to protobuf payload. So it serves in different port 
> but using same backend gRPC service.
>
> Thanks
> Danesh
>
> On Monday, February 11, 2019 at 5:37:51 PM UTC+5:30, Glen Huang wrote:
>>
>> I have a grpc server, and I'm wondering if it's possible to make 
>> it accept JSON and protobuf payload simultaneously? I want to use the 
>> server for both backend and frontend communications.
>>
>> By that I mean, if I have a proto definition like this:
>>
>> service Greeter {
>>   rpc SayHello (HelloRequest) returns (HelloReply) {}
>> }
>>
>> message HelloRequest {
>>   string name = 1;
>> }
>>
>> message HelloReply {
>>   string message = 1;
>> }
>>
>> and set up a grpc server to listen at port 1234 for example. Obviously I 
>> can use grpc client to talk to it via port 1234. But Is it possible that I 
>> can send an http request with a particular JSON payload and path to the 
>> same port, that can call the SayHello on the server?
>>
>> I heard it's pretty expensive to encode and decode protobuf with 
>> javascript, so I hope I could keep using JSON and make grpc accept it 
>> automatically.
>>
>> Is that something already feasible?
>>
>> I use Golang btw.
>>
>> Thanks in advance.
>>
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/4e3f29ab-3869-4c2f-bc7d-31fc57acd676%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


Re: [grpc-io] Re: Configure Python gRPC Server for Compression

2019-02-13 Thread rbellevi via grpc.io
In the interim, if you're not tied to Python specifically, you could use a 
different implementation of gRPC that fully supports compression, like C++ 
or Java. If you really are tied to Python, then you could do the 
compression yourself, using a module like gzip.

On Wednesday, February 13, 2019 at 1:07:24 PM UTC-8, Asad Habib wrote:
>
> Okay, thanks for the update. I will follow this issue via github. For the 
> time being, what would you recommend to reduce gRPC payload sizes with 
> Python? From Python server to client, I changed the definition of my 
> variable from string to bytes but that didn't make any difference in 
> payload size. Since I'm sending huge chunks of data across the wire (GBs of 
> data), even a small difference matters. Your input would be greatly 
> appreciated. Thanks.
>
> On Wed, Feb 13, 2019 at 2:08 PM rbellevi via grpc.io <
> grp...@googlegroups.com > wrote:
>
>> Unfortunately, compression settings are not yet fully supported in 
>> Python gRPC.  However, I have 
>> work queued up for this quarter to address this gap. You can track progress 
>> on this through the linked github issue.
>>
>>
>> On Thursday, February 7, 2019 at 12:14:42 AM UTC-8, asadh...@gmail.com 
>> wrote:
>>>
>>> Has anyone done this? I cannot find any documentation or code 
>>> demonstrating this. For my Python gRPC client, I use the following:
>>>
>>> channel = grpc.insecure_channel('localhost:50051', 
>>> options=[('grpc.default_compression_algorithm', 1), 
>>> ('grpc.default_compression_level', CompressionLevel.high)])
>>>
>>>
>>> How do I do the same for my Python gRPC server? Any help would be 
>>> greatly appreciated. Thanks.
>>>
>> -- 
>> You received this message because you are subscribed to a topic in the 
>> Google Groups "grpc.io" group.
>> To unsubscribe from this topic, visit 
>> https://groups.google.com/d/topic/grpc-io/gLOMKC3xSow/unsubscribe.
>> To unsubscribe from this group and all its topics, send an email to 
>> grpc-io+u...@googlegroups.com .
>> To post to this group, send email to grp...@googlegroups.com 
>> .
>> Visit this group at https://groups.google.com/group/grpc-io.
>> To view this discussion on the web visit 
>> https://groups.google.com/d/msgid/grpc-io/7a396ae3-aebc-487e-b85e-e908f94070b5%40googlegroups.com
>>  
>> 
>> .
>> For more options, visit https://groups.google.com/d/optout.
>>
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/92a9165e-ab04-421f-82f6-2ff7dcd8b73c%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


Re: [grpc-io] Re: Configure Python gRPC Server for Compression

2019-02-13 Thread Asad Habib
Okay, thanks for the update. I will follow this issue via github. For the
time being, what would you recommend to reduce gRPC payload sizes with
Python? From Python server to client, I changed the definition of my
variable from string to bytes but that didn't make any difference in
payload size. Since I'm sending huge chunks of data across the wire (GBs of
data), even a small difference matters. Your input would be greatly
appreciated. Thanks.

On Wed, Feb 13, 2019 at 2:08 PM rbellevi via grpc.io <
grpc-io@googlegroups.com> wrote:

> Unfortunately, compression settings are not yet fully supported in Python
> gRPC.  However, I have work
> queued up for this quarter to address this gap. You can track progress on
> this through the linked github issue.
>
>
> On Thursday, February 7, 2019 at 12:14:42 AM UTC-8, asadh...@gmail.com
> wrote:
>>
>> Has anyone done this? I cannot find any documentation or code
>> demonstrating this. For my Python gRPC client, I use the following:
>>
>> channel = grpc.insecure_channel('localhost:50051', 
>> options=[('grpc.default_compression_algorithm', 1), 
>> ('grpc.default_compression_level', CompressionLevel.high)])
>>
>>
>> How do I do the same for my Python gRPC server? Any help would be greatly
>> appreciated. Thanks.
>>
> --
> You received this message because you are subscribed to a topic in the
> Google Groups "grpc.io" group.
> To unsubscribe from this topic, visit
> https://groups.google.com/d/topic/grpc-io/gLOMKC3xSow/unsubscribe.
> To unsubscribe from this group and all its topics, send an email to
> grpc-io+unsubscr...@googlegroups.com.
> To post to this group, send email to grpc-io@googlegroups.com.
> Visit this group at https://groups.google.com/group/grpc-io.
> To view this discussion on the web visit
> https://groups.google.com/d/msgid/grpc-io/7a396ae3-aebc-487e-b85e-e908f94070b5%40googlegroups.com
> 
> .
> For more options, visit https://groups.google.com/d/optout.
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/CADS1Ld-gUWwc6ROuDZc0CO%2Bj5MEFUoPA%2BzXLQou4J2ZbxjRnwQ%40mail.gmail.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Re: Configure Python gRPC Server for Compression

2019-02-13 Thread rbellevi via grpc.io
Unfortunately, compression settings are not yet fully supported in Python 
gRPC.  However, I have work 
queued up for this quarter to address this gap. You can track progress on 
this through the linked github issue.


On Thursday, February 7, 2019 at 12:14:42 AM UTC-8, asadh...@gmail.com 
wrote:
>
> Has anyone done this? I cannot find any documentation or code 
> demonstrating this. For my Python gRPC client, I use the following:
>
> channel = grpc.insecure_channel('localhost:50051', 
> options=[('grpc.default_compression_algorithm', 1), 
> ('grpc.default_compression_level', CompressionLevel.high)])
>
>
> How do I do the same for my Python gRPC server? Any help would be greatly 
> appreciated. Thanks.
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/7a396ae3-aebc-487e-b85e-e908f94070b5%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Using multiple late-bound services

2019-02-13 Thread Geoff Groos
Hey everyone

I'm building an API with GRPC which currently looks like this:

service OurEndpoint {
   rpc register (RegistrationForFeatureCeeAndDee) returns (stream 
FeatureCeeOrDeeRequest) {}
 
   rpc featureA (FeatureAyeRequest) returns (FeatureAyeReponse) {}
   rpc featureB (FeatureBeeRequest) returns (FeatureBeeResponse) {}
   
   rpc offerFeatureC(FeatureCeeResponse) returns (Confirmation) {}
   rpc offerFeatureD(FeatureDeeResponse) returns (Confirmation) {}
   rpc offerCeeOrDeeFailed(FailureResponse) returns (Confirmation) {}
}


message FeatureCeeOrDeeRequest {
oneof request {
FeatureDeeRequest deeRequest = 1;
FeatureCeeRequest ceeRequest = 2;  
}
}


message Confirmation {}

Note that features A and B are fairly traditional client-driven 
request-response pairs.

Features C and D are callbacks; the client registers with a message that 
effectively says:

> I can provide answers to C and D — send me a message and I'll call 
> offerFeatureResponse as appropriate.


I don't like this. It makes our application code complex. We effectively 
have to build our own multiplexer for things like offerCeeOrDeeFailed

What I'd really rather do is this:

service OurEndpoint {
   rpc register (RegistrationForFeatureCeeAndDee) returns (Confirmation) {}
 
   rpc featureA (FeatureAyeRequest) returns (FeatureAyeReponse) {}
   rpc featureB (FeatureBeeRequest) returns (FeatureBeeResponse) {}  
}
service EndpointClientMustImplement {
   rpc featureC(FeatureCeeRequest) returns (FeatureCeeResponse) {}
   rpc featureD(FeatureDeeRequest) returns (FeatureDeeResponse) {}
}


message RegistrationForFeatureCeeAndDee {
   ConnectionToken name = 1;
}


message Confirmation {}


The problem here is how to go about implementing ConnectionToken and its 
handler. Ideally I'd like some code like this:

//kotlin, which is on the jvm.
override fun register(request: RegistrationForFeatureCeeAndDee, response: 
ResponseObserver) {
   
//...
   
val channel: Channel = ManagedChannelBuilder
.for("localhost", 5551) // a port shared by the service 
handling this very response
.build()
   
val stub: EndpointClientMustImplement = EndpointClientMustImplement.
newBuilder()
.withServiceNameOrSimilar(request.name)
.build()
   
//
}

What is the best way to go about this?
1. Can I have multiple servers at a single address?
2. What's the best way to find a service instance by name at runtime rather 
than by a type-derived (and thus by statically bound) name? I suspect the 
BindableService and ServerServiceDefinitions will help me here, but I 
really don't want to mess with the method-table building and the code 
generating system seems opaque. 

I guess my ideal solution would be to ask the code generator to generate 
code that is open on its service name, --ideally open on a constructor 
param such that there is no way to instance the service without specifying 
its service name.

Or, perhaps there's some other strategy I should be using? I could of 
course specify port numbers and then instance grpc services once-per-port, 
but that means I'm bounded on the number of ports I'm using by the number 
of active API users I have, which is very strange.

Many thanks!

-Geoff

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/7344c5fc-3c92-4003-aa40-cfb03a27a4c5%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Re: Does grpc-java supports cert reload without restarting server?

2019-02-13 Thread sanjaypujare via grpc.io
Hi Daisy

There is an issue on github  https://github.com/grpc/grpc-java/issues/5335 
that talks about the same thing and possibly created by you.

There is a 
comment https://github.com/grpc/grpc-java/issues/5335#issuecomment-462531217 
that describes an approach to achieve this. Can you verify if it works for 
you?

On Thursday, February 7, 2019 at 11:14:32 AM UTC-8, Daisy Zhu wrote:
>
> Thanks for the reply.
>
> Is this the document  https://grpc.io/blog/loadbalancing on round robin 
> load balancer you mentioned? For cert hot rotation which way is recommended?
>
> 1. round robin load balancer in client side 
> 2. implement hot cert reloading in sever side
>
> If using method 2 are there any potential issues needed to be paid 
> attention to?
>
> Best,
> Daisy
>
> On Thursday, February 7, 2019 at 11:01:01 AM UTC-8, Carl Mastrangelo wrote:
>>
>> You are correct, Java doesn't support this.   However, if you are using 
>> the round robin load balancer in your client, you should be able to 
>> gracefully restart your servers with the new certificate without dropping 
>> any requests.
>>
>> On Thursday, February 7, 2019 at 6:52:46 AM UTC-8, Danesh Kuruppu wrote:
>>>
>>> Hi Daisy,
>>>
>>> Does grpc-java supports cert refresh without restarting server?

>>>
>>> AFAIK, this is not supported yet. We need to restart the server.
>>> Please correct me if I am wrong.
>>>
>>> Thanks
>>> Danesh
>>>
>>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/40f6c06c-b566-4148-9a1d-f43f6686a9f4%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Re: grpc python: OS Error, grpc_status: 14

2019-02-13 Thread rbellevi via grpc.io
Hello,

You can increase logging by setting the following environment variables:

GRPC_VERBOSITY=DEBUG
GRPC_TRACE=all

On Friday, February 1, 2019 at 12:48:25 PM UTC-8, justin@gmail.com 
wrote:
>
> I've been using successfully some python code using grpcio 1.17 and grpcio 
> 1.18 on a Ubuntu Linux machine to connect to a remote gRPC server.
>
> However, on CentOS 7.5, when I try to run the same code, I immediately 
> receive status=StatusCode.UNAVAILABLE details="OS Error" trying to connect 
> to the same server. Is there any way to increase logging to get more 
> details other an "OS Error" to try and find out why this is occurring on 
> CentOS?
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/7113794f-c403-4c72-b62f-b58f70f723fe%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Re: SEGFAULT in greeter_client

2019-02-13 Thread 'Carl Mastrangelo' via grpc.io
Hi, can you file an issue on gRPC's GitHub issue 
tracker?  https://github.com/grpc/grpc/issues/new

On Thursday, January 24, 2019 at 7:37:05 AM UTC-8, Gautham B A wrote:
>
> Hi all,
>
> I just cloned and built gRPC 
> (SHA 9ed8734efb9b1b2cd892942c2c6dd57e903ce719). I'm getting SEGFAULT when I 
> try to run greeter_client in C++. It SEGFAULTs when the RPC call is made -
>
> Status status = stub_->SayHello(&context, request, &reply);
>
> Here's how I'm building greeter_client -
>
> cmake_minimum_required(VERSION 3.13)
> project(HelloWorld)
>
> set(CMAKE_CXX_STANDARD 17)
>
> set(GRPC_BUILD_DIR
> /Users/gautham/projects/github/grpc)
>
> set(LIB_GRPC
> ${GRPC_BUILD_DIR}/libs/opt/libgpr.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libaddress_sorting.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_cronet.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_error_details.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_reflection.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc++_unsecure.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_cronet.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpc_unsecure.dylib
> ${GRPC_BUILD_DIR}/libs/opt/libgrpcpp_channelz.dylib
> )
>
> set(LIB_PROTOBUF
> 
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.17.dylib
> 
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf-lite.dylib
> 
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.17.dylib
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotobuf.dylib
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.17.dylib
> ${GRPC_BUILD_DIR}/third_party/protobuf/src/.libs/libprotoc.dylib
> )
>
> include_directories(
> ${GRPC_BUILD_DIR}/include
> )
>
> add_executable(greeter_client
> greeter_client.cc
> helloworld.grpc.pb.cc
> helloworld.pb.cc
> )
>
> target_link_libraries(greeter_client
> ${LIB_GRPC}
> ${LIB_PROTOBUF}
> )
>
> Here's the coredump -
> * thread #1, stop reason = signal SIGSTOP
>   * frame #0: 0x7fffa253a19e libsystem_kernel.dylib`poll + 10
> frame #1: 0x00010e6c01a6 
> libgrpc.dylib`pollset_work(pollset=, 
> worker_hdl=0x7fff519dded8, deadline=) at 
> ev_poll_posix.cc:1063 [opt]
> frame #2: 0x00010e6e5999 
> libgrpc.dylib`cq_pluck(cq=0x7fad6240ae40, tag=0x7fff519de200, 
> deadline=, reserved=) at completion_queue.cc:1282 
> [opt]
> frame #3: 0x00010e22c4d1 
> greeter_client`grpc::CompletionQueue::Pluck(grpc::internal::CompletionQueueTag*)
>  
> + 161
> frame #4: 0x00010e22b810 
> greeter_client`grpc::internal::BlockingUnaryCallImpl  
> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
> helloworld::HelloRequest const&, helloworld::HelloReply*) + 704
> frame #5: 0x00010e22b4ed 
> greeter_client`grpc::internal::BlockingUnaryCallImpl  
> helloworld::HelloReply>::BlockingUnaryCallImpl(grpc::ChannelInterface*, 
> grpc::internal::RpcMethod const&, grpc::ClientContext*, 
> helloworld::HelloRequest const&, helloworld::HelloReply*) + 61
> frame #6: 0x00010e228921 greeter_client`grpc::Status 
> grpc::internal::BlockingUnaryCall helloworld::HelloReply>(grpc::ChannelInterface*, grpc::internal::RpcMethod 
> const&, grpc::ClientContext*, helloworld::HelloRequest const&, 
> helloworld::HelloReply*) + 81
> frame #7: 0x00010e2288c5 
> greeter_client`helloworld::Greeter::Stub::SayHello(grpc::ClientContext*, 
> helloworld::HelloRequest const&, helloworld::HelloReply*) + 85
> frame #8: 0x00010e226ecb 
> greeter_client`GreeterClient::SayHello(std::__1::basic_string std::__1::char_traits, std::__1::allocator > const&) + 235
> frame #9: 0x00010e226c05 greeter_client`main + 469
> frame #10: 0x7fffa240a235 libdyld.dylib`start + 1
> frame #11: 0x7fffa240a235 libdyld.dylib`start + 1
>
> I'm using macOS Sierra 10.12.6
>
> Compiler -
> clang
> Apple LLVM version 9.0.0 (clang-900.0.39.2)
> Target: x86_64-apple-darwin16.7.0
> Thread model: posix
>
> Can anyone please help me?
>
> Thanks,
> --Gautham
>
>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/f3dc762a-3dfd-4482-a833-f43a1a32a539%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.


[grpc-io] Re: StatusRuntimeException with gRPC stream: Channel closed

2019-02-13 Thread 'Carl Mastrangelo' via grpc.io
Cancellation is usually done by your application code, unless there is a 
proxy in the path.  There is no default time limit, and RPCs will never 
time out normally.   You can add one by setting a deadline on the stub.
Deadlines will fail with a DEADLINE_EXCEEDED error code, rather than a 
CANCELED.

On Tuesday, February 12, 2019 at 8:24:27 PM UTC-8, mur...@akruta.com wrote:
>
> This error occurs on the server side.
> Also, what is the default timelimit for an rpc call?
>
>
>
> On Tuesday, February 12, 2019 at 8:01:44 PM UTC-8, mur...@akruta.com 
> wrote:
>>
>> Hi all,
>>
>> I am trying to use multiple grpc bidirectional streams in the same 
>> service between a client and server. And every now and then I get the 
>> following error:
>> ```
>>  io.grpc.StatusRuntimeException: CANCELLED: cancelled before receiving 
>> half close
>> at io.grpc.Status.asRuntimeException(Status.java:517)
>> at 
>> io.grpc.stub.ServerCalls$StreamingServerCallHandler$StreamingServerCallListener.onCancel(ServerCalls.java:272)
>> at 
>> io.grpc.PartialForwardingServerCallListener.onCancel(PartialForwardingServerCallListener.java:40)
>> at 
>> io.grpc.ForwardingServerCallListener.onCancel(ForwardingServerCallListener.java:23)
>> at 
>> io.grpc.ForwardingServerCallListener$SimpleForwardingServerCallListener.onCancel(ForwardingServerCallListener.java:40)
>> at 
>> io.grpc.Contexts$ContextualizedServerCallListener.onCancel(Contexts.java:96)
>> at 
>> io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.closed(ServerCallImpl.java:293)
>> at 
>> io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1Closed.runInContext(ServerImpl.java:738)
>> at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
>> at 
>> io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
>> at 
>> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1162)
>> at 
>> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:636)
>> at java.lang.Thread.run(Thread.java:764)
>> ```
>> did anyone else face this issue?
>> any pointers would be really appreciated.
>>
>> Thanks,
>> Murali
>>
>>

-- 
You received this message because you are subscribed to the Google Groups 
"grpc.io" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to grpc-io+unsubscr...@googlegroups.com.
To post to this group, send email to grpc-io@googlegroups.com.
Visit this group at https://groups.google.com/group/grpc-io.
To view this discussion on the web visit 
https://groups.google.com/d/msgid/grpc-io/a824abea-890b-43ff-9333-7431d3a52ff4%40googlegroups.com.
For more options, visit https://groups.google.com/d/optout.