PostgreSQL Source Code  git master
llvmjit_deform.c File Reference
#include "postgres.h"
#include <llvm-c/Core.h>
#include "access/htup_details.h"
#include "access/tupdesc_details.h"
#include "executor/tuptable.h"
#include "jit/llvmjit.h"
#include "jit/llvmjit_emit.h"
Include dependency graph for llvmjit_deform.c:

Go to the source code of this file.

Functions

LLVMValueRef slot_compile_deform (LLVMJitContext *context, TupleDesc desc, const TupleTableSlotOps *ops, int natts)
 

Function Documentation

◆ slot_compile_deform()

LLVMValueRef slot_compile_deform ( LLVMJitContext *  context,
TupleDesc  desc,
const TupleTableSlotOps *  ops,
int  natts 
)

Definition at line 34 of file llvmjit_deform.c.

36 {
37  char *funcname;
38 
39  LLVMModuleRef mod;
40  LLVMBuilderRef b;
41 
42  LLVMTypeRef deform_sig;
43  LLVMValueRef v_deform_fn;
44 
45  LLVMBasicBlockRef b_entry;
46  LLVMBasicBlockRef b_adjust_unavail_cols;
47  LLVMBasicBlockRef b_find_start;
48 
49  LLVMBasicBlockRef b_out;
50  LLVMBasicBlockRef b_dead;
51  LLVMBasicBlockRef *attcheckattnoblocks;
52  LLVMBasicBlockRef *attstartblocks;
53  LLVMBasicBlockRef *attisnullblocks;
54  LLVMBasicBlockRef *attcheckalignblocks;
55  LLVMBasicBlockRef *attalignblocks;
56  LLVMBasicBlockRef *attstoreblocks;
57 
58  LLVMValueRef v_offp;
59 
60  LLVMValueRef v_tupdata_base;
61  LLVMValueRef v_tts_values;
62  LLVMValueRef v_tts_nulls;
63  LLVMValueRef v_slotoffp;
64  LLVMValueRef v_flagsp;
65  LLVMValueRef v_nvalidp;
66  LLVMValueRef v_nvalid;
67  LLVMValueRef v_maxatt;
68 
69  LLVMValueRef v_slot;
70 
71  LLVMValueRef v_tupleheaderp;
72  LLVMValueRef v_tuplep;
73  LLVMValueRef v_infomask1;
74  LLVMValueRef v_infomask2;
75  LLVMValueRef v_bits;
76 
77  LLVMValueRef v_hoff;
78 
79  LLVMValueRef v_hasnulls;
80 
81  /* last column (0 indexed) guaranteed to exist */
82  int guaranteed_column_number = -1;
83 
84  /* current known alignment */
85  int known_alignment = 0;
86 
87  /* if true, known_alignment describes definite offset of column */
88  bool attguaranteedalign = true;
89 
90  int attnum;
91 
92  /* virtual tuples never need deforming, so don't generate code */
93  if (ops == &TTSOpsVirtual)
94  return NULL;
95 
96  /* decline to JIT for slot types we don't know to handle */
97  if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
98  ops != &TTSOpsMinimalTuple)
99  return NULL;
100 
101  mod = llvm_mutable_module(context);
102 
103  funcname = llvm_expand_funcname(context, "deform");
104 
105  /*
106  * Check which columns have to exist, so we don't have to check the row's
107  * natts unnecessarily.
108  */
109  for (attnum = 0; attnum < desc->natts; attnum++)
110  {
112 
113  /*
114  * If the column is declared NOT NULL then it must be present in every
115  * tuple, unless there's a "missing" entry that could provide a
116  * non-NULL value for it. That in turn guarantees that the NULL bitmap
117  * - if there are any NULLable columns - is at least long enough to
118  * cover columns up to attnum.
119  *
120  * Be paranoid and also check !attisdropped, even though the
121  * combination of attisdropped && attnotnull combination shouldn't
122  * exist.
123  */
124  if (att->attnotnull &&
125  !att->atthasmissing &&
126  !att->attisdropped)
127  guaranteed_column_number = attnum;
128  }
129 
130  /* Create the signature and function */
131  {
132  LLVMTypeRef param_types[1];
133 
134  param_types[0] = l_ptr(StructTupleTableSlot);
135 
136  deform_sig = LLVMFunctionType(LLVMVoidType(), param_types,
137  lengthof(param_types), 0);
138  }
139  v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
140  LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
141  LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
143 
144  b_entry =
145  LLVMAppendBasicBlock(v_deform_fn, "entry");
146  b_adjust_unavail_cols =
147  LLVMAppendBasicBlock(v_deform_fn, "adjust_unavail_cols");
148  b_find_start =
149  LLVMAppendBasicBlock(v_deform_fn, "find_startblock");
150  b_out =
151  LLVMAppendBasicBlock(v_deform_fn, "outblock");
152  b_dead =
153  LLVMAppendBasicBlock(v_deform_fn, "deadblock");
154 
155  b = LLVMCreateBuilder();
156 
157  attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
158  attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
159  attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
160  attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
161  attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
162  attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
163 
164  known_alignment = 0;
165 
166  LLVMPositionBuilderAtEnd(b, b_entry);
167 
168  /* perform allocas first, llvm only converts those to registers */
169  v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");
170 
171  v_slot = LLVMGetParam(v_deform_fn, 0);
172 
173  v_tts_values =
174  l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_VALUES,
175  "tts_values");
176  v_tts_nulls =
177  l_load_struct_gep(b, v_slot, FIELDNO_TUPLETABLESLOT_ISNULL,
178  "tts_ISNULL");
179  v_flagsp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_FLAGS, "");
180  v_nvalidp = LLVMBuildStructGEP(b, v_slot, FIELDNO_TUPLETABLESLOT_NVALID, "");
181 
182  if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
183  {
184  LLVMValueRef v_heapslot;
185 
186  v_heapslot =
187  LLVMBuildBitCast(b,
188  v_slot,
190  "heapslot");
191  v_slotoffp = LLVMBuildStructGEP(b, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
192  v_tupleheaderp =
193  l_load_struct_gep(b, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
194  "tupleheader");
195  }
196  else if (ops == &TTSOpsMinimalTuple)
197  {
198  LLVMValueRef v_minimalslot;
199 
200  v_minimalslot =
201  LLVMBuildBitCast(b,
202  v_slot,
204  "minimalslot");
205  v_slotoffp = LLVMBuildStructGEP(b, v_minimalslot, FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
206  v_tupleheaderp =
207  l_load_struct_gep(b, v_minimalslot, FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
208  "tupleheader");
209  }
210  else
211  {
212  /* should've returned at the start of the function */
213  pg_unreachable();
214  }
215 
216  v_tuplep =
217  l_load_struct_gep(b, v_tupleheaderp, FIELDNO_HEAPTUPLEDATA_DATA,
218  "tuple");
219  v_bits =
220  LLVMBuildBitCast(b,
221  LLVMBuildStructGEP(b, v_tuplep,
223  ""),
224  l_ptr(LLVMInt8Type()),
225  "t_bits");
226  v_infomask1 =
227  l_load_struct_gep(b, v_tuplep,
229  "infomask1");
230  v_infomask2 =
231  l_load_struct_gep(b,
233  "infomask2");
234 
235  /* t_infomask & HEAP_HASNULL */
236  v_hasnulls =
237  LLVMBuildICmp(b, LLVMIntNE,
238  LLVMBuildAnd(b,
239  l_int16_const(HEAP_HASNULL),
240  v_infomask1, ""),
241  l_int16_const(0),
242  "hasnulls");
243 
244  /* t_infomask2 & HEAP_NATTS_MASK */
245  v_maxatt = LLVMBuildAnd(b,
246  l_int16_const(HEAP_NATTS_MASK),
247  v_infomask2,
248  "maxatt");
249 
250  /*
251  * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
252  * integer, which'd yield a negative offset for t_hoff > 127.
253  */
254  v_hoff =
255  LLVMBuildZExt(b,
256  l_load_struct_gep(b, v_tuplep,
258  ""),
259  LLVMInt32Type(), "t_hoff");
260 
261  v_tupdata_base =
262  LLVMBuildGEP(b,
263  LLVMBuildBitCast(b,
264  v_tuplep,
265  l_ptr(LLVMInt8Type()),
266  ""),
267  &v_hoff, 1,
268  "v_tupdata_base");
269 
270  /*
271  * Load tuple start offset from slot. Will be reset below in case there's
272  * no existing deformed columns in slot.
273  */
274  {
275  LLVMValueRef v_off_start;
276 
277  v_off_start = LLVMBuildLoad(b, v_slotoffp, "v_slot_off");
278  v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
279  LLVMBuildStore(b, v_off_start, v_offp);
280  }
281 
282  /* build the basic block for each attribute, need them as jump target */
283  for (attnum = 0; attnum < natts; attnum++)
284  {
285  attcheckattnoblocks[attnum] =
286  l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
287  attstartblocks[attnum] =
288  l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
289  attisnullblocks[attnum] =
290  l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
291  attcheckalignblocks[attnum] =
292  l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
293  attalignblocks[attnum] =
294  l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
295  attstoreblocks[attnum] =
296  l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
297  }
298 
299  /*
300  * Check if it is guaranteed that all the desired attributes are available
301  * in the tuple (but still possibly NULL), by dint of either the last
302  * to-be-deformed column being NOT NULL, or subsequent ones not accessed
303  * here being NOT NULL. If that's not guaranteed the tuple headers natt's
304  * has to be checked, and missing attributes potentially have to be
305  * fetched (using slot_getmissingattrs().
306  */
307  if ((natts - 1) <= guaranteed_column_number)
308  {
309  /* just skip through unnecessary blocks */
310  LLVMBuildBr(b, b_adjust_unavail_cols);
311  LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
312  LLVMBuildBr(b, b_find_start);
313  }
314  else
315  {
316  LLVMValueRef v_params[3];
317 
318  /* branch if not all columns available */
319  LLVMBuildCondBr(b,
320  LLVMBuildICmp(b, LLVMIntULT,
321  v_maxatt,
322  l_int16_const(natts),
323  ""),
324  b_adjust_unavail_cols,
325  b_find_start);
326 
327  /* if not, memset tts_isnull of relevant cols to true */
328  LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
329 
330  v_params[0] = v_slot;
331  v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32Type(), "");
332  v_params[2] = l_int32_const(natts);
333  LLVMBuildCall(b, llvm_pg_func(mod, "slot_getmissingattrs"),
334  v_params, lengthof(v_params), "");
335  LLVMBuildBr(b, b_find_start);
336  }
337 
338  LLVMPositionBuilderAtEnd(b, b_find_start);
339 
340  v_nvalid = LLVMBuildLoad(b, v_nvalidp, "");
341 
342  /*
343  * Build switch to go from nvalid to the right startblock. Callers
344  * currently don't have the knowledge, but it'd be good for performance to
345  * avoid this check when it's known that the slot is empty (e.g. in scan
346  * nodes).
347  */
348  if (true)
349  {
350  LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
351  b_dead, natts);
352 
353  for (attnum = 0; attnum < natts; attnum++)
354  {
355  LLVMValueRef v_attno = l_int16_const(attnum);
356 
357  LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
358  }
359  }
360  else
361  {
362  /* jump from entry block to first block */
363  LLVMBuildBr(b, attcheckattnoblocks[0]);
364  }
365 
366  LLVMPositionBuilderAtEnd(b, b_dead);
367  LLVMBuildUnreachable(b);
368 
369  /*
370  * Iterate over each attribute that needs to be deformed, build code to
371  * deform it.
372  */
373  for (attnum = 0; attnum < natts; attnum++)
374  {
376  LLVMValueRef v_incby;
377  int alignto;
378  LLVMValueRef l_attno = l_int16_const(attnum);
379  LLVMValueRef v_attdatap;
380  LLVMValueRef v_resultp;
381 
382  /* build block checking whether we did all the necessary attributes */
383  LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);
384 
385  /*
386  * If this is the first attribute, slot->tts_nvalid was 0. Therefore
387  * also reset offset to 0, it may be from a previous execution.
388  */
389  if (attnum == 0)
390  {
391  LLVMBuildStore(b, l_sizet_const(0), v_offp);
392  }
393 
394  /*
395  * Build check whether column is available (i.e. whether the tuple has
396  * that many columns stored). We can avoid the branch if we know
397  * there's a subsequent NOT NULL column.
398  */
399  if (attnum <= guaranteed_column_number)
400  {
401  LLVMBuildBr(b, attstartblocks[attnum]);
402  }
403  else
404  {
405  LLVMValueRef v_islast;
406 
407  v_islast = LLVMBuildICmp(b, LLVMIntUGE,
408  l_attno,
409  v_maxatt,
410  "heap_natts");
411  LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
412  }
413  LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);
414 
415  /*
416  * Check for nulls if necessary. No need to take missing attributes
417  * into account, because if they're present the heaptuple's natts
418  * would have indicated that a slot_getmissingattrs() is needed.
419  */
420  if (!att->attnotnull)
421  {
422  LLVMBasicBlockRef b_ifnotnull;
423  LLVMBasicBlockRef b_ifnull;
424  LLVMBasicBlockRef b_next;
425  LLVMValueRef v_attisnull;
426  LLVMValueRef v_nullbyteno;
427  LLVMValueRef v_nullbytemask;
428  LLVMValueRef v_nullbyte;
429  LLVMValueRef v_nullbit;
430 
431  b_ifnotnull = attcheckalignblocks[attnum];
432  b_ifnull = attisnullblocks[attnum];
433 
434  if (attnum + 1 == natts)
435  b_next = b_out;
436  else
437  b_next = attcheckattnoblocks[attnum + 1];
438 
439  v_nullbyteno = l_int32_const(attnum >> 3);
440  v_nullbytemask = l_int8_const(1 << ((attnum) & 0x07));
441  v_nullbyte = l_load_gep1(b, v_bits, v_nullbyteno, "attnullbyte");
442 
443  v_nullbit = LLVMBuildICmp(b,
444  LLVMIntEQ,
445  LLVMBuildAnd(b, v_nullbyte, v_nullbytemask, ""),
446  l_int8_const(0),
447  "attisnull");
448 
449  v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");
450 
451  LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);
452 
453  LLVMPositionBuilderAtEnd(b, b_ifnull);
454 
455  /* store null-byte */
456  LLVMBuildStore(b,
457  l_int8_const(1),
458  LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));
459  /* store zero datum */
460  LLVMBuildStore(b,
461  l_sizet_const(0),
462  LLVMBuildGEP(b, v_tts_values, &l_attno, 1, ""));
463 
464  LLVMBuildBr(b, b_next);
465  attguaranteedalign = false;
466  }
467  else
468  {
469  /* nothing to do */
470  LLVMBuildBr(b, attcheckalignblocks[attnum]);
471  LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
472  LLVMBuildBr(b, attcheckalignblocks[attnum]);
473  }
474  LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
475 
476  /* determine required alignment */
477  if (att->attalign == TYPALIGN_INT)
478  alignto = ALIGNOF_INT;
479  else if (att->attalign == TYPALIGN_CHAR)
480  alignto = 1;
481  else if (att->attalign == TYPALIGN_DOUBLE)
482  alignto = ALIGNOF_DOUBLE;
483  else if (att->attalign == TYPALIGN_SHORT)
484  alignto = ALIGNOF_SHORT;
485  else
486  {
487  elog(ERROR, "unknown alignment");
488  alignto = 0;
489  }
490 
491  /* ------
492  * Even if alignment is required, we can skip doing it if provably
493  * unnecessary:
494  * - first column is guaranteed to be aligned
495  * - columns following a NOT NULL fixed width datum have known
496  * alignment, can skip alignment computation if that known alignment
497  * is compatible with current column.
498  * ------
499  */
500  if (alignto > 1 &&
501  (known_alignment < 0 || known_alignment != TYPEALIGN(alignto, known_alignment)))
502  {
503  /*
504  * When accessing a varlena field, we have to "peek" to see if we
505  * are looking at a pad byte or the first byte of a 1-byte-header
506  * datum. A zero byte must be either a pad byte, or the first
507  * byte of a correctly aligned 4-byte length word; in either case,
508  * we can align safely. A non-zero byte must be either a 1-byte
509  * length word, or the first byte of a correctly aligned 4-byte
510  * length word; in either case, we need not align.
511  */
512  if (att->attlen == -1)
513  {
514  LLVMValueRef v_possible_padbyte;
515  LLVMValueRef v_ispad;
516  LLVMValueRef v_off;
517 
518  /* don't know if short varlena or not */
519  attguaranteedalign = false;
520 
521  v_off = LLVMBuildLoad(b, v_offp, "");
522 
523  v_possible_padbyte =
524  l_load_gep1(b, v_tupdata_base, v_off, "padbyte");
525  v_ispad =
526  LLVMBuildICmp(b, LLVMIntEQ,
527  v_possible_padbyte, l_int8_const(0),
528  "ispadbyte");
529  LLVMBuildCondBr(b, v_ispad,
530  attalignblocks[attnum],
531  attstoreblocks[attnum]);
532  }
533  else
534  {
535  LLVMBuildBr(b, attalignblocks[attnum]);
536  }
537 
538  LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
539 
540  /* translation of alignment code (cf TYPEALIGN()) */
541  {
542  LLVMValueRef v_off_aligned;
543  LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");
544 
545  /* ((ALIGNVAL) - 1) */
546  LLVMValueRef v_alignval = l_sizet_const(alignto - 1);
547 
548  /* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
549  LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");
550 
551  /* ~((uintptr_t) ((ALIGNVAL) - 1)) */
552  LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));
553 
554  v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");
555 
556  LLVMBuildStore(b, v_off_aligned, v_offp);
557  }
558 
559  /*
560  * As alignment either was unnecessary or has been performed, we
561  * now know the current alignment. This is only safe because this
562  * value isn't used for varlena and nullable columns.
563  */
564  if (known_alignment >= 0)
565  {
566  Assert(known_alignment != 0);
567  known_alignment = TYPEALIGN(alignto, known_alignment);
568  }
569 
570  LLVMBuildBr(b, attstoreblocks[attnum]);
571  LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
572  }
573  else
574  {
575  LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
576  LLVMBuildBr(b, attalignblocks[attnum]);
577  LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
578  LLVMBuildBr(b, attstoreblocks[attnum]);
579  }
580  LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
581 
582  /*
583  * Store the current offset if known to be constant. That allows LLVM
584  * to generate better code. Without that LLVM can't figure out that
585  * the offset might be constant due to the jumps for previously
586  * decoded columns.
587  */
588  if (attguaranteedalign)
589  {
590  Assert(known_alignment >= 0);
591  LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
592  }
593 
594  /* compute what following columns are aligned to */
595  if (att->attlen < 0)
596  {
597  /* can't guarantee any alignment after variable length field */
598  known_alignment = -1;
599  attguaranteedalign = false;
600  }
601  else if (att->attnotnull && attguaranteedalign && known_alignment >= 0)
602  {
603  /*
604  * If the offset to the column was previously known, a NOT NULL &
605  * fixed-width column guarantees that alignment is just the
606  * previous alignment plus column width.
607  */
608  Assert(att->attlen > 0);
609  known_alignment += att->attlen;
610  }
611  else if (att->attnotnull && (att->attlen % alignto) == 0)
612  {
613  /*
614  * After a NOT NULL fixed-width column with a length that is a
615  * multiple of its alignment requirement, we know the following
616  * column is aligned to at least the current column's alignment.
617  */
618  Assert(att->attlen > 0);
619  known_alignment = alignto;
620  Assert(known_alignment > 0);
621  attguaranteedalign = false;
622  }
623  else
624  {
625  known_alignment = -1;
626  attguaranteedalign = false;
627  }
628 
629 
630  /* compute address to load data from */
631  {
632  LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");
633 
634  v_attdatap =
635  LLVMBuildGEP(b, v_tupdata_base, &v_off, 1, "");
636  }
637 
638  /* compute address to store value at */
639  v_resultp = LLVMBuildGEP(b, v_tts_values, &l_attno, 1, "");
640 
641  /* store null-byte (false) */
642  LLVMBuildStore(b, l_int8_const(0),
643  LLVMBuildGEP(b, v_tts_nulls, &l_attno, 1, ""));
644 
645  /*
646  * Store datum. For byval: datums copy the value, extend to Datum's
647  * width, and store. For byref types: store pointer to data.
648  */
649  if (att->attbyval)
650  {
651  LLVMValueRef v_tmp_loaddata;
652  LLVMTypeRef vartypep =
653  LLVMPointerType(LLVMIntType(att->attlen * 8), 0);
654 
655  v_tmp_loaddata =
656  LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
657  v_tmp_loaddata = LLVMBuildLoad(b, v_tmp_loaddata, "attr_byval");
658  v_tmp_loaddata = LLVMBuildZExt(b, v_tmp_loaddata, TypeSizeT, "");
659 
660  LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
661  }
662  else
663  {
664  LLVMValueRef v_tmp_loaddata;
665 
666  /* store pointer */
667  v_tmp_loaddata =
668  LLVMBuildPtrToInt(b,
669  v_attdatap,
670  TypeSizeT,
671  "attr_ptr");
672  LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
673  }
674 
675  /* increment data pointer */
676  if (att->attlen > 0)
677  {
678  v_incby = l_sizet_const(att->attlen);
679  }
680  else if (att->attlen == -1)
681  {
682  v_incby = LLVMBuildCall(b,
683  llvm_pg_func(mod, "varsize_any"),
684  &v_attdatap, 1,
685  "varsize_any");
686  l_callsite_ro(v_incby);
687  l_callsite_alwaysinline(v_incby);
688  }
689  else if (att->attlen == -2)
690  {
691  v_incby = LLVMBuildCall(b,
692  llvm_pg_func(mod, "strlen"),
693  &v_attdatap, 1, "strlen");
694 
695  l_callsite_ro(v_incby);
696 
697  /* add 1 for NUL byte */
698  v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
699  }
700  else
701  {
702  Assert(false);
703  v_incby = NULL; /* silence compiler */
704  }
705 
706  if (attguaranteedalign)
707  {
708  Assert(known_alignment >= 0);
709  LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
710  }
711  else
712  {
713  LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");
714 
715  v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
716  LLVMBuildStore(b, v_off, v_offp);
717  }
718 
719  /*
720  * jump to next block, unless last possible column, or all desired
721  * (available) attributes have been fetched.
722  */
723  if (attnum + 1 == natts)
724  {
725  /* jump out */
726  LLVMBuildBr(b, b_out);
727  }
728  else
729  {
730  LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
731  }
732  }
733 
734 
735  /* build block that returns */
736  LLVMPositionBuilderAtEnd(b, b_out);
737 
738  {
739  LLVMValueRef v_off = LLVMBuildLoad(b, v_offp, "");
740  LLVMValueRef v_flags;
741 
742  LLVMBuildStore(b, l_int16_const(natts), v_nvalidp);
743  v_off = LLVMBuildTrunc(b, v_off, LLVMInt32Type(), "");
744  LLVMBuildStore(b, v_off, v_slotoffp);
745  v_flags = LLVMBuildLoad(b, v_flagsp, "tts_flags");
746  v_flags = LLVMBuildOr(b, v_flags, l_int16_const(TTS_FLAG_SLOW), "");
747  LLVMBuildStore(b, v_flags, v_flagsp);
748  LLVMBuildRetVoid(b);
749  }
750 
751  LLVMDisposeBuilder(b);
752 
753  return v_deform_fn;
754 }
#define TYPEALIGN(ALIGNVAL, LEN)
Definition: c.h:740
#define pg_unreachable()
Definition: c.h:280
#define lengthof(array)
Definition: c.h:724
#define ERROR
Definition: elog.h:39
const TupleTableSlotOps TTSOpsVirtual
Definition: execTuples.c:83
const TupleTableSlotOps TTSOpsBufferHeapTuple
Definition: execTuples.c:86
const TupleTableSlotOps TTSOpsHeapTuple
Definition: execTuples.c:84
const TupleTableSlotOps TTSOpsMinimalTuple
Definition: execTuples.c:85
#define FIELDNO_HEAPTUPLEDATA_DATA
Definition: htup.h:67
#define HEAP_NATTS_MASK
Definition: htup_details.h:272
#define HEAP_HASNULL
Definition: htup_details.h:189
#define FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK
Definition: htup_details.h:168
#define FIELDNO_HEAPTUPLEHEADERDATA_HOFF
Definition: htup_details.h:171
#define FIELDNO_HEAPTUPLEHEADERDATA_BITS
Definition: htup_details.h:176
#define FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2
Definition: htup_details.h:165
int b
Definition: isn.c:70
Assert(fmt[strlen(fmt) - 1] !='\n')
LLVMTypeRef StructMinimalTupleTableSlot
Definition: llvmjit.c:77
LLVMValueRef llvm_pg_func(LLVMModuleRef mod, const char *funcname)
Definition: llvmjit.c:419
LLVMTypeRef TypeSizeT
Definition: llvmjit.c:59
char * llvm_expand_funcname(struct LLVMJitContext *context, const char *basename)
Definition: llvmjit.c:255
LLVMTypeRef StructTupleTableSlot
Definition: llvmjit.c:75
LLVMTypeRef StructHeapTupleTableSlot
Definition: llvmjit.c:76
LLVMModuleRef llvm_mutable_module(LLVMJitContext *context)
Definition: llvmjit.c:230
LLVMValueRef AttributeTemplate
Definition: llvmjit.c:89
void llvm_copy_attributes(LLVMValueRef v_from, LLVMValueRef v_to)
Definition: llvmjit.c:475
void * palloc(Size size)
Definition: mcxt.c:1199
int16 attnum
Definition: pg_attribute.h:83
FormData_pg_attribute * Form_pg_attribute
Definition: pg_attribute.h:207
#define TupleDescAttr(tupdesc, i)
Definition: tupdesc.h:92
#define TTS_FLAG_SLOW
Definition: tuptable.h:104
#define FIELDNO_HEAPTUPLETABLESLOT_OFF
Definition: tuptable.h:255
#define FIELDNO_HEAPTUPLETABLESLOT_TUPLE
Definition: tuptable.h:253
#define FIELDNO_TUPLETABLESLOT_ISNULL
Definition: tuptable.h:127
#define FIELDNO_MINIMALTUPLETABLESLOT_TUPLE
Definition: tuptable.h:289
#define FIELDNO_MINIMALTUPLETABLESLOT_OFF
Definition: tuptable.h:293
#define FIELDNO_TUPLETABLESLOT_VALUES
Definition: tuptable.h:125
#define FIELDNO_TUPLETABLESLOT_FLAGS
Definition: tuptable.h:118
#define FIELDNO_TUPLETABLESLOT_NVALID
Definition: tuptable.h:120

References Assert(), attnum, AttributeTemplate, b, elog(), ERROR, FIELDNO_HEAPTUPLEDATA_DATA, FIELDNO_HEAPTUPLEHEADERDATA_BITS, FIELDNO_HEAPTUPLEHEADERDATA_HOFF, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2, FIELDNO_HEAPTUPLETABLESLOT_OFF, FIELDNO_HEAPTUPLETABLESLOT_TUPLE, FIELDNO_MINIMALTUPLETABLESLOT_OFF, FIELDNO_MINIMALTUPLETABLESLOT_TUPLE, FIELDNO_TUPLETABLESLOT_FLAGS, FIELDNO_TUPLETABLESLOT_ISNULL, FIELDNO_TUPLETABLESLOT_NVALID, FIELDNO_TUPLETABLESLOT_VALUES, HEAP_HASNULL, HEAP_NATTS_MASK, lengthof, llvm_copy_attributes(), llvm_expand_funcname(), llvm_mutable_module(), llvm_pg_func(), TupleDescData::natts, palloc(), pg_unreachable, StructHeapTupleTableSlot, StructMinimalTupleTableSlot, StructTupleTableSlot, TTS_FLAG_SLOW, TTSOpsBufferHeapTuple, TTSOpsHeapTuple, TTSOpsMinimalTuple, TTSOpsVirtual, TupleDescAttr, TYPEALIGN, and TypeSizeT.