/*
 *
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include <grpc/support/alloc.h>
#include <grpc/support/log.h>
#include "src/core/lib/channel/channel_stack.h"
#include "src/core/lib/gpr/alloc.h"

#include <stdlib.h>
#include <string.h>

grpc_core::TraceFlag grpc_trace_channel(false, "channel");

/* Memory layouts.

   Channel stack is laid out as: {
     grpc_channel_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_channel_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   }

   Call stack is laid out as: {
     grpc_call_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_call_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   } */

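/* Computes the number of bytes needed to hold a channel stack built from
   the given filters: the aligned grpc_channel_stack header, the aligned
   array of grpc_channel_element entries, plus each filter's channel data
   rounded up to GPR_MAX_ALIGNMENT. As a purely illustrative example,
   assuming GPR_MAX_ALIGNMENT is 16 and two filters declare
   sizeof_channel_data of 24 and 40 bytes, the per-filter region would
   contribute 32 + 48 = 80 bytes on top of the header and element array. */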
size_t grpc_channel_stack_size(const grpc_channel_filter** filters,
                               size_t filter_count) {
  /* always need the header, and size for the channel elements */
  size_t size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) +
                GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                               sizeof(grpc_channel_element));
  size_t i;

  GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 &&
             "GPR_MAX_ALIGNMENT must be a power of two");

  /* add the size for each filter */
  for (i = 0; i < filter_count; i++) {
    size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
  }

  return size;
}

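/* Each element array begins immediately after its stack header, once the
   header size is rounded up to GPR_MAX_ALIGNMENT; these macros encode that
   pointer arithmetic. */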
#define CHANNEL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_channel_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                              sizeof(grpc_channel_stack))))

#define CALL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_call_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                           sizeof(grpc_call_stack))))

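/* Accessors that index into the element arrays located by the macros
   above. */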
grpc_channel_element* grpc_channel_stack_element(
    grpc_channel_stack* channel_stack, size_t index) {
  return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index;
}

grpc_channel_element* grpc_channel_stack_last_element(
    grpc_channel_stack* channel_stack) {
  return grpc_channel_stack_element(channel_stack, channel_stack->count - 1);
}

grpc_call_element* grpc_call_stack_element(grpc_call_stack* call_stack,
                                           size_t index) {
  return CALL_ELEMS_FROM_STACK(call_stack) + index;
}

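/* Initializes a pre-allocated channel stack in place: sets up the refcount,
   points each element at its filter and its aligned channel-data slot, and
   invokes every filter's init_channel_elem. Only the first error reported
   by a filter is returned; subsequent errors are unref'd. The size needed
   for a call stack on this channel is accumulated along the way and stored
   in stack->call_stack_size. */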
grpc_error* grpc_channel_stack_init(
    int initial_refs, grpc_iomgr_cb_func destroy, void* destroy_arg,
    const grpc_channel_filter** filters, size_t filter_count,
    const grpc_channel_args* channel_args, grpc_transport* optional_transport,
    const char* name, grpc_channel_stack* stack) {
  size_t call_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
  grpc_channel_element* elems;
  grpc_channel_element_args args;
  char* user_data;
  size_t i;

  stack->count = filter_count;
  GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
                       name);
  elems = CHANNEL_ELEMS_FROM_STACK(stack);
  user_data = (reinterpret_cast<char*>(elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                             sizeof(grpc_channel_element));

  /* init per-filter data */
  grpc_error* first_error = GRPC_ERROR_NONE;
  for (i = 0; i < filter_count; i++) {
    args.channel_stack = stack;
    args.channel_args = channel_args;
    args.optional_transport = optional_transport;
    args.is_first = i == 0;
    args.is_last = i == (filter_count - 1);
    elems[i].filter = filters[i];
    elems[i].channel_data = user_data;
    grpc_error* error = elems[i].filter->init_channel_elem(&elems[i], &args);
    if (error != GRPC_ERROR_NONE) {
      /* report only the first failure to the caller; release the rest */
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
    call_size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
  }

  GPR_ASSERT(user_data > (char*)stack);
  GPR_ASSERT((uintptr_t)(user_data - (char*)stack) ==
             grpc_channel_stack_size(filters, filter_count));

  stack->call_stack_size = call_size;
  return first_error;
}

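/* Invokes destroy_channel_elem on every element; the memory backing the
   stack is not freed here. */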
void grpc_channel_stack_destroy(grpc_channel_stack* stack) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    channel_elems[i].filter->destroy_channel_elem(&channel_elems[i]);
  }
}

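/* Initializes the call stack embedded in elem_args->call_stack, mirroring
   the channel stack: each call element shares its channel element's filter
   and channel_data, receives its own aligned call_data slot, and is
   initialized via init_call_elem. As in grpc_channel_stack_init, only the
   first error is returned to the caller. */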
grpc_error* grpc_call_stack_init(grpc_channel_stack* channel_stack,
                                 int initial_refs, grpc_iomgr_cb_func destroy,
                                 void* destroy_arg,
                                 const grpc_call_element_args* elem_args) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  size_t count = channel_stack->count;
  grpc_call_element* call_elems;
  char* user_data;
  size_t i;

  elem_args->call_stack->count = count;
  GRPC_STREAM_REF_INIT(&elem_args->call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(elem_args->call_stack);
  user_data = (reinterpret_cast<char*>(call_elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  grpc_error* first_error = GRPC_ERROR_NONE;
  for (i = 0; i < count; i++) {
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    grpc_error* error =
        call_elems[i].filter->init_call_elem(&call_elems[i], elem_args);
    if (error != GRPC_ERROR_NONE) {
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
  return first_error;
}

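/* Propagates the call's polling entity to every element so filters that
   perform I/O can poll against the right pollset or pollset_set. */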
void grpc_call_stack_set_pollset_or_pollset_set(grpc_call_stack* call_stack,
                                                grpc_polling_entity* pollent) {
  size_t count = call_stack->count;
  grpc_call_element* call_elems;
  size_t i;

  call_elems = CALL_ELEMS_FROM_STACK(call_stack);

  /* notify each element of the polling entity */
  for (i = 0; i < count; i++) {
    call_elems[i].filter->set_pollset_or_pollset_set(&call_elems[i], pollent);
  }
}

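/* No-op handler for filters that have no interest in the call's polling
   entity. */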
void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_call_element* elem, grpc_polling_entity* pollent) {}

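/* Destroys each element's per-call data; then_schedule_closure is handed
   only to the last element so it can be scheduled once that element
   finishes cleaning up. */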
void grpc_call_stack_destroy(grpc_call_stack* stack,
                             const grpc_call_final_info* final_info,
                             grpc_closure* then_schedule_closure) {
  grpc_call_element* elems = CALL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    elems[i].filter->destroy_call_elem(
        &elems[i], final_info,
        i == count - 1 ? then_schedule_closure : nullptr);
  }
}

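/* The grpc_*_next_* helpers forward an operation to the element immediately
   below the caller in the stack; filters use them to pass along work they
   do not handle themselves. */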
void grpc_call_next_op(grpc_call_element* elem,
                       grpc_transport_stream_op_batch* op) {
  grpc_call_element* next_elem = elem + 1;
  GRPC_CALL_LOG_OP(GPR_INFO, next_elem, op);
  next_elem->filter->start_transport_stream_op_batch(next_elem, op);
}

void grpc_channel_next_get_info(grpc_channel_element* elem,
                                const grpc_channel_info* channel_info) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->get_channel_info(next_elem, channel_info);
}

void grpc_channel_next_op(grpc_channel_element* elem, grpc_transport_op* op) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->start_transport_op(next_elem, op);
}

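/* Recovers the owning stack from a pointer to its first (top) element by
   undoing the aligned header offset applied by the *_ELEMS_FROM_STACK
   macros. */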
grpc_channel_stack* grpc_channel_stack_from_top_element(
    grpc_channel_element* elem) {
  return reinterpret_cast<grpc_channel_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)));
}

grpc_call_stack* grpc_call_stack_from_top_element(grpc_call_element* elem) {
  return reinterpret_cast<grpc_call_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)));
}