Line data Source code
1 : /*-------------------------------------------------------------------------
2 : *
3 : * llvmjit_deform.c
4 : * Generate code for deforming a heap tuple.
5 : *
6 : * This gains performance benefits over unJITed deforming from compile-time
7 : * knowledge of the tuple descriptor. Fixed column widths, NOT NULLness, etc
8 : * can be taken advantage of.
9 : *
10 : * Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
11 : * Portions Copyright (c) 1994, Regents of the University of California
12 : *
13 : * IDENTIFICATION
14 : * src/backend/jit/llvm/llvmjit_deform.c
15 : *
16 : *-------------------------------------------------------------------------
17 : */
18 :
19 : #include "postgres.h"
20 :
21 : #include <llvm-c/Core.h>
22 :
23 : #include "access/htup_details.h"
24 : #include "access/tupdesc_details.h"
25 : #include "executor/tuptable.h"
26 : #include "jit/llvmjit.h"
27 : #include "jit/llvmjit_emit.h"
28 :
29 :
30 : /*
31 : * Create a function that deforms a tuple of type desc up to natts columns.
32 : */
33 : LLVMValueRef
34 3329 : slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
35 : const TupleTableSlotOps *ops, int natts)
36 : {
37 : char *funcname;
38 :
39 : LLVMModuleRef mod;
40 : LLVMContextRef lc;
41 : LLVMBuilderRef b;
42 :
43 : LLVMTypeRef deform_sig;
44 : LLVMValueRef v_deform_fn;
45 :
46 : LLVMBasicBlockRef b_entry;
47 : LLVMBasicBlockRef b_adjust_unavail_cols;
48 : LLVMBasicBlockRef b_find_start;
49 :
50 : LLVMBasicBlockRef b_out;
51 : LLVMBasicBlockRef b_dead;
52 : LLVMBasicBlockRef *attcheckattnoblocks;
53 : LLVMBasicBlockRef *attstartblocks;
54 : LLVMBasicBlockRef *attisnullblocks;
55 : LLVMBasicBlockRef *attcheckalignblocks;
56 : LLVMBasicBlockRef *attalignblocks;
57 : LLVMBasicBlockRef *attstoreblocks;
58 :
59 : LLVMValueRef v_offp;
60 :
61 : LLVMValueRef v_tupdata_base;
62 : LLVMValueRef v_tts_values;
63 : LLVMValueRef v_tts_nulls;
64 : LLVMValueRef v_slotoffp;
65 : LLVMValueRef v_nvalidp;
66 : LLVMValueRef v_nvalid;
67 : LLVMValueRef v_maxatt;
68 :
69 : LLVMValueRef v_slot;
70 :
71 : LLVMValueRef v_tupleheaderp;
72 : LLVMValueRef v_tuplep;
73 : LLVMValueRef v_infomask1;
74 : LLVMValueRef v_infomask2;
75 : LLVMValueRef v_bits;
76 :
77 : LLVMValueRef v_hoff;
78 :
79 : LLVMValueRef v_hasnulls;
80 :
81 : /* last column (0 indexed) guaranteed to exist */
82 3329 : int guaranteed_column_number = -1;
83 :
84 : /* current known alignment */
85 3329 : int known_alignment = 0;
86 :
87 : /* if true, known_alignment describes definite offset of column */
88 3329 : bool attguaranteedalign = true;
89 :
90 : int attnum;
91 :
92 : /* virtual tuples never need deforming, so don't generate code */
93 3329 : if (ops == &TTSOpsVirtual)
94 0 : return NULL;
95 :
96 : /* decline to JIT for slot types we don't know to handle */
97 3329 : if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
98 : ops != &TTSOpsMinimalTuple)
99 0 : return NULL;
100 :
101 3329 : mod = llvm_mutable_module(context);
102 3329 : lc = LLVMGetModuleContext(mod);
103 :
104 3329 : funcname = llvm_expand_funcname(context, "deform");
105 :
106 : /*
107 : * Check which columns have to exist, so we don't have to check the row's
108 : * natts unnecessarily.
109 : */
110 18438 : for (attnum = 0; attnum < desc->natts; attnum++)
111 : {
112 15109 : CompactAttribute *att = TupleDescCompactAttr(desc, attnum);
113 :
114 : /*
115 : * If the column is declared NOT NULL then it must be present in every
116 : * tuple, unless there's a "missing" entry that could provide a
117 : * non-NULL value for it. That in turn guarantees that the NULL bitmap
118 : * - if there are any NULLable columns - is at least long enough to
119 : * cover columns up to attnum.
120 : *
121 : * Be paranoid and also check !attisdropped, even though the
122 : * combination of attisdropped && attnotnull shouldn't
123 : * exist.
124 : */
125 15109 : if (att->attnullability == ATTNULLABLE_VALID &&
126 4024 : !att->atthasmissing &&
127 4024 : !att->attisdropped)
128 4024 : guaranteed_column_number = attnum;
129 : }
130 :
131 : /* Create the signature and function */
132 : {
133 : LLVMTypeRef param_types[1];
134 :
135 3329 : param_types[0] = l_ptr(StructTupleTableSlot);
136 :
137 3329 : deform_sig = LLVMFunctionType(LLVMVoidTypeInContext(lc),
138 : param_types, lengthof(param_types), 0);
139 : }
140 3329 : v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
141 3329 : LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
142 3329 : LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
143 3329 : llvm_copy_attributes(AttributeTemplate, v_deform_fn);
144 :
145 : b_entry =
146 3329 : LLVMAppendBasicBlockInContext(lc, v_deform_fn, "entry");
147 : b_adjust_unavail_cols =
148 3329 : LLVMAppendBasicBlockInContext(lc, v_deform_fn, "adjust_unavail_cols");
149 : b_find_start =
150 3329 : LLVMAppendBasicBlockInContext(lc, v_deform_fn, "find_startblock");
151 : b_out =
152 3329 : LLVMAppendBasicBlockInContext(lc, v_deform_fn, "outblock");
153 : b_dead =
154 3329 : LLVMAppendBasicBlockInContext(lc, v_deform_fn, "deadblock");
155 :
156 3329 : b = LLVMCreateBuilderInContext(lc);
157 :
158 3329 : attcheckattnoblocks = palloc_array(LLVMBasicBlockRef, natts);
159 3329 : attstartblocks = palloc_array(LLVMBasicBlockRef, natts);
160 3329 : attisnullblocks = palloc_array(LLVMBasicBlockRef, natts);
161 3329 : attcheckalignblocks = palloc_array(LLVMBasicBlockRef, natts);
162 3329 : attalignblocks = palloc_array(LLVMBasicBlockRef, natts);
163 3329 : attstoreblocks = palloc_array(LLVMBasicBlockRef, natts);
164 :
165 3329 : known_alignment = 0;
166 :
167 3329 : LLVMPositionBuilderAtEnd(b, b_entry);
168 :
169 : /* perform allocas first, llvm only converts those to registers */
170 3329 : v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");
171 :
172 3329 : v_slot = LLVMGetParam(v_deform_fn, 0);
173 :
174 : v_tts_values =
175 3329 : l_load_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_VALUES,
176 : "tts_values");
177 : v_tts_nulls =
178 3329 : l_load_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_ISNULL,
179 : "tts_ISNULL");
180 3329 : v_nvalidp = l_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_NVALID, "");
181 :
182 3329 : if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
183 1617 : {
184 : LLVMValueRef v_heapslot;
185 :
186 : v_heapslot =
187 1617 : LLVMBuildBitCast(b,
188 : v_slot,
189 : l_ptr(StructHeapTupleTableSlot),
190 : "heapslot");
191 1617 : v_slotoffp = l_struct_gep(b, StructHeapTupleTableSlot, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
192 : v_tupleheaderp =
193 1617 : l_load_struct_gep(b, StructHeapTupleTableSlot, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
194 : "tupleheader");
195 : }
196 1712 : else if (ops == &TTSOpsMinimalTuple)
197 : {
198 : LLVMValueRef v_minimalslot;
199 :
200 : v_minimalslot =
201 1712 : LLVMBuildBitCast(b,
202 : v_slot,
203 : l_ptr(StructMinimalTupleTableSlot),
204 : "minimalslot");
205 1712 : v_slotoffp = l_struct_gep(b,
206 : StructMinimalTupleTableSlot,
207 : v_minimalslot,
208 : FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
209 : v_tupleheaderp =
210 1712 : l_load_struct_gep(b,
211 : StructMinimalTupleTableSlot,
212 : v_minimalslot,
213 : FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
214 : "tupleheader");
215 : }
216 : else
217 : {
218 : /* should've returned at the start of the function */
219 0 : pg_unreachable();
220 : }
221 :
222 : v_tuplep =
223 3329 : l_load_struct_gep(b,
224 : StructHeapTupleData,
225 : v_tupleheaderp,
226 : FIELDNO_HEAPTUPLEDATA_DATA,
227 : "tuple");
228 : v_bits =
229 3329 : LLVMBuildBitCast(b,
230 : l_struct_gep(b,
231 : StructHeapTupleHeaderData,
232 : v_tuplep,
233 : FIELDNO_HEAPTUPLEHEADERDATA_BITS,
234 : ""),
235 : l_ptr(LLVMInt8TypeInContext(lc)),
236 : "t_bits");
237 : v_infomask1 =
238 3329 : l_load_struct_gep(b,
239 : StructHeapTupleHeaderData,
240 : v_tuplep,
241 : FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK,
242 : "infomask1");
243 : v_infomask2 =
244 3329 : l_load_struct_gep(b,
245 : StructHeapTupleHeaderData,
246 : v_tuplep, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2,
247 : "infomask2");
248 :
249 : /* t_infomask & HEAP_HASNULL */
250 : v_hasnulls =
251 3329 : LLVMBuildICmp(b, LLVMIntNE,
252 : LLVMBuildAnd(b,
253 : l_int16_const(lc, HEAP_HASNULL),
254 : v_infomask1, ""),
255 : l_int16_const(lc, 0),
256 : "hasnulls");
257 :
258 : /* t_infomask2 & HEAP_NATTS_MASK */
259 3329 : v_maxatt = LLVMBuildAnd(b,
260 : l_int16_const(lc, HEAP_NATTS_MASK),
261 : v_infomask2,
262 : "maxatt");
263 :
264 : /*
265 : * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
266 : * integer, which'd yield a negative offset for t_hoff > 127.
267 : */
268 3329 : v_hoff =
269 3329 : LLVMBuildZExt(b,
270 : l_load_struct_gep(b,
271 : StructHeapTupleHeaderData,
272 : v_tuplep,
273 : FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
274 : ""),
275 : LLVMInt32TypeInContext(lc), "t_hoff");
276 :
277 3329 : v_tupdata_base = l_gep(b,
278 : LLVMInt8TypeInContext(lc),
279 : LLVMBuildBitCast(b,
280 : v_tuplep,
281 : l_ptr(LLVMInt8TypeInContext(lc)),
282 : ""),
283 : &v_hoff, 1,
284 : "v_tupdata_base");
285 :
286 : /*
287 : * Load tuple start offset from slot. Will be reset below in case there's
288 : * no existing deformed columns in slot.
289 : */
290 : {
291 : LLVMValueRef v_off_start;
292 :
293 3329 : v_off_start = l_load(b, LLVMInt32TypeInContext(lc), v_slotoffp, "v_slot_off");
294 3329 : v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
295 3329 : LLVMBuildStore(b, v_off_start, v_offp);
296 : }
297 :
298 : /* build the basic block for each attribute, need them as jump target */
299 11832 : for (attnum = 0; attnum < natts; attnum++)
300 : {
301 17006 : attcheckattnoblocks[attnum] =
302 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
303 17006 : attstartblocks[attnum] =
304 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
305 17006 : attisnullblocks[attnum] =
306 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
307 17006 : attcheckalignblocks[attnum] =
308 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
309 17006 : attalignblocks[attnum] =
310 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
311 8503 : attstoreblocks[attnum] =
312 8503 : l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
313 : }
314 :
315 : /*
316 : * Check if it is guaranteed that all the desired attributes are available
317 : * in the tuple (but still possibly NULL), by dint of either the last
318 : * to-be-deformed column being NOT NULL, or subsequent ones not accessed
319 : * here being NOT NULL. If that's not guaranteed the tuple header's natts
320 : * has to be checked, and missing attributes potentially have to be
321 : * fetched (using slot_getmissingattrs()).
322 : */
323 3329 : if ((natts - 1) <= guaranteed_column_number)
324 : {
325 : /* just skip through unnecessary blocks */
326 307 : LLVMBuildBr(b, b_adjust_unavail_cols);
327 307 : LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
328 307 : LLVMBuildBr(b, b_find_start);
329 : }
330 : else
331 : {
332 : LLVMValueRef v_params[3];
333 : LLVMValueRef f;
334 :
335 : /* branch if not all columns available */
336 3022 : LLVMBuildCondBr(b,
337 : LLVMBuildICmp(b, LLVMIntULT,
338 : v_maxatt,
339 : l_int16_const(lc, natts),
340 : ""),
341 : b_adjust_unavail_cols,
342 : b_find_start);
343 :
344 : /* if not, memset tts_isnull of relevant cols to true */
345 3022 : LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
346 :
347 3022 : v_params[0] = v_slot;
348 3022 : v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32TypeInContext(lc), "");
349 3022 : v_params[2] = l_int32_const(lc, natts);
350 3022 : f = llvm_pg_func(mod, "slot_getmissingattrs");
351 3022 : l_call(b,
352 : LLVMGetFunctionType(f), f,
353 : v_params, lengthof(v_params), "");
354 3022 : LLVMBuildBr(b, b_find_start);
355 : }
356 :
357 3329 : LLVMPositionBuilderAtEnd(b, b_find_start);
358 :
359 3329 : v_nvalid = l_load(b, LLVMInt16TypeInContext(lc), v_nvalidp, "");
360 :
361 : /*
362 : * Build switch to go from nvalid to the right startblock. Callers
363 : * currently don't have the knowledge, but it'd be good for performance to
364 : * avoid this check when it's known that the slot is empty (e.g. in scan
365 : * nodes).
366 : */
367 : if (true)
368 : {
369 3329 : LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
370 : b_dead, natts);
371 :
372 11832 : for (attnum = 0; attnum < natts; attnum++)
373 : {
374 8503 : LLVMValueRef v_attno = l_int16_const(lc, attnum);
375 :
376 8503 : LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
377 : }
378 : }
379 : else
380 : {
381 : /* jump from entry block to first block */
382 : LLVMBuildBr(b, attcheckattnoblocks[0]);
383 : }
384 :
385 3329 : LLVMPositionBuilderAtEnd(b, b_dead);
386 3329 : LLVMBuildUnreachable(b);
387 :
388 : /*
389 : * Iterate over each attribute that needs to be deformed, build code to
390 : * deform it.
391 : */
392 11832 : for (attnum = 0; attnum < natts; attnum++)
393 : {
394 8503 : CompactAttribute *att = TupleDescCompactAttr(desc, attnum);
395 : LLVMValueRef v_incby;
396 8503 : int alignto = att->attalignby;
397 8503 : LLVMValueRef l_attno = l_int16_const(lc, attnum);
398 : LLVMValueRef v_attdatap;
399 : LLVMValueRef v_resultp;
400 :
401 : /* build block checking whether we did all the necessary attributes */
402 8503 : LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);
403 :
404 : /*
405 : * If this is the first attribute, slot->tts_nvalid was 0. Therefore
406 : * also reset offset to 0, it may be from a previous execution.
407 : */
408 8503 : if (attnum == 0)
409 : {
410 3329 : LLVMBuildStore(b, l_sizet_const(0), v_offp);
411 : }
412 :
413 : /*
414 : * Build check whether column is available (i.e. whether the tuple has
415 : * that many columns stored). We can avoid the branch if we know
416 : * there's a subsequent NOT NULL column.
417 : */
418 8503 : if (attnum <= guaranteed_column_number)
419 : {
420 1194 : LLVMBuildBr(b, attstartblocks[attnum]);
421 : }
422 : else
423 : {
424 : LLVMValueRef v_islast;
425 :
426 7309 : v_islast = LLVMBuildICmp(b, LLVMIntUGE,
427 : l_attno,
428 : v_maxatt,
429 : "heap_natts");
430 7309 : LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
431 : }
432 8503 : LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);
433 :
434 : /*
435 : * Check for nulls if necessary. No need to take missing attributes
436 : * into account, because if they're present the heaptuple's natts
437 : * would have indicated that a slot_getmissingattrs() is needed.
438 : */
439 8503 : if (att->attnullability != ATTNULLABLE_VALID)
440 : {
441 : LLVMBasicBlockRef b_ifnotnull;
442 : LLVMBasicBlockRef b_ifnull;
443 : LLVMBasicBlockRef b_next;
444 : LLVMValueRef v_attisnull;
445 : LLVMValueRef v_nullbyteno;
446 : LLVMValueRef v_nullbytemask;
447 : LLVMValueRef v_nullbyte;
448 : LLVMValueRef v_nullbit;
449 :
450 7317 : b_ifnotnull = attcheckalignblocks[attnum];
451 7317 : b_ifnull = attisnullblocks[attnum];
452 :
453 7317 : if (attnum + 1 == natts)
454 3022 : b_next = b_out;
455 : else
456 4295 : b_next = attcheckattnoblocks[attnum + 1];
457 :
458 7317 : v_nullbyteno = l_int32_const(lc, attnum >> 3);
459 7317 : v_nullbytemask = l_int8_const(lc, 1 << ((attnum) & 0x07));
460 7317 : v_nullbyte = l_load_gep1(b, LLVMInt8TypeInContext(lc), v_bits, v_nullbyteno, "attnullbyte");
461 :
462 7317 : v_nullbit = LLVMBuildICmp(b,
463 : LLVMIntEQ,
464 : LLVMBuildAnd(b, v_nullbyte, v_nullbytemask, ""),
465 : l_int8_const(lc, 0),
466 : "attisnull");
467 :
468 7317 : v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");
469 :
470 7317 : LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);
471 :
472 7317 : LLVMPositionBuilderAtEnd(b, b_ifnull);
473 :
474 : /* store null-byte */
475 7317 : LLVMBuildStore(b,
476 : l_int8_const(lc, 1),
477 : l_gep(b, LLVMInt8TypeInContext(lc), v_tts_nulls, &l_attno, 1, ""));
478 : /* store zero datum */
479 7317 : LLVMBuildStore(b,
480 : l_datum_const(0),
481 : l_gep(b, TypeDatum, v_tts_values, &l_attno, 1, ""));
482 :
483 7317 : LLVMBuildBr(b, b_next);
484 7317 : attguaranteedalign = false;
485 : }
486 : else
487 : {
488 : /* column is guaranteed NOT NULL, no null check necessary */
489 1186 : LLVMBuildBr(b, attcheckalignblocks[attnum]);
490 1186 : LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
491 1186 : LLVMBuildBr(b, attcheckalignblocks[attnum]);
492 : }
493 8503 : LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
494 :
495 : /* ------
496 : * Even if alignment is required, we can skip doing it if provably
497 : * unnecessary:
498 : * - first column is guaranteed to be aligned
499 : * - columns following a NOT NULL fixed width datum have known
500 : * alignment, can skip alignment computation if that known alignment
501 : * is compatible with current column.
502 : * ------
503 : */
504 8503 : if (alignto > 1 &&
505 3670 : (known_alignment < 0 || known_alignment != TYPEALIGN(alignto, known_alignment)))
506 : {
507 : /*
508 : * When accessing a varlena field, we have to "peek" to see if we
509 : * are looking at a pad byte or the first byte of a 1-byte-header
510 : * datum. A zero byte must be either a pad byte, or the first
511 : * byte of a correctly aligned 4-byte length word; in either case,
512 : * we can align safely. A non-zero byte must be either a 1-byte
513 : * length word, or the first byte of a correctly aligned 4-byte
514 : * length word; in either case, we need not align.
515 : */
516 4036 : if (att->attlen == -1)
517 : {
518 : LLVMValueRef v_possible_padbyte;
519 : LLVMValueRef v_ispad;
520 : LLVMValueRef v_off;
521 :
522 : /* don't know if short varlena or not */
523 470 : attguaranteedalign = false;
524 :
525 470 : v_off = l_load(b, TypeSizeT, v_offp, "");
526 :
527 : v_possible_padbyte =
528 470 : l_load_gep1(b, LLVMInt8TypeInContext(lc), v_tupdata_base, v_off, "padbyte");
529 : v_ispad =
530 470 : LLVMBuildICmp(b, LLVMIntEQ,
531 : v_possible_padbyte, l_int8_const(lc, 0),
532 : "ispadbyte");
533 470 : LLVMBuildCondBr(b, v_ispad,
534 470 : attalignblocks[attnum],
535 470 : attstoreblocks[attnum]);
536 : }
537 : else
538 : {
539 3566 : LLVMBuildBr(b, attalignblocks[attnum]);
540 : }
541 :
542 4036 : LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
543 :
544 : /* translation of alignment code (cf TYPEALIGN()) */
545 : {
546 : LLVMValueRef v_off_aligned;
547 4036 : LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
548 :
549 : /* ((ALIGNVAL) - 1) */
550 4036 : LLVMValueRef v_alignval = l_sizet_const(alignto - 1);
551 :
552 : /* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
553 4036 : LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");
554 :
555 : /* ~((uintptr_t) ((ALIGNVAL) - 1)) */
556 4036 : LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));
557 :
558 4036 : v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");
559 :
560 4036 : LLVMBuildStore(b, v_off_aligned, v_offp);
561 : }
562 :
563 : /*
564 : * As alignment either was unnecessary or has been performed, we
565 : * now know the current alignment. This is only safe because this
566 : * value isn't used for varlena and nullable columns.
567 : */
568 4036 : if (known_alignment >= 0)
569 : {
570 : Assert(known_alignment != 0);
571 8 : known_alignment = TYPEALIGN(alignto, known_alignment);
572 : }
573 :
574 4036 : LLVMBuildBr(b, attstoreblocks[attnum]);
575 4036 : LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
576 : }
577 : else
578 : {
579 4467 : LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
580 4467 : LLVMBuildBr(b, attalignblocks[attnum]);
581 4467 : LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
582 4467 : LLVMBuildBr(b, attstoreblocks[attnum]);
583 : }
584 8503 : LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
585 :
586 : /*
587 : * Store the current offset if known to be constant. That allows LLVM
588 : * to generate better code. Without that LLVM can't figure out that
589 : * the offset might be constant due to the jumps for previously
590 : * decoded columns.
591 : */
592 8503 : if (attguaranteedalign)
593 : {
594 : Assert(known_alignment >= 0);
595 1178 : LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
596 : }
597 :
598 : /* compute what following columns are aligned to */
599 8503 : if (att->attlen < 0)
600 : {
601 : /* can't guarantee any alignment after variable length field */
602 797 : known_alignment = -1;
603 797 : attguaranteedalign = false;
604 : }
605 7706 : else if (att->attnullability == ATTNULLABLE_VALID &&
606 1170 : attguaranteedalign && known_alignment >= 0)
607 : {
608 : /*
609 : * If the offset to the column was previously known, a NOT NULL &
610 : * fixed-width column guarantees that alignment is just the
611 : * previous alignment plus column width.
612 : */
613 : Assert(att->attlen > 0);
614 1170 : known_alignment += att->attlen;
615 : }
616 6536 : else if (att->attnullability == ATTNULLABLE_VALID &&
617 0 : (att->attlen % alignto) == 0)
618 : {
619 : /*
620 : * After a NOT NULL fixed-width column with a length that is a
621 : * multiple of its alignment requirement, we know the following
622 : * column is aligned to at least the current column's alignment.
623 : */
624 : Assert(att->attlen > 0);
625 0 : known_alignment = alignto;
626 : Assert(known_alignment > 0);
627 0 : attguaranteedalign = false;
628 : }
629 : else
630 : {
631 6536 : known_alignment = -1;
632 6536 : attguaranteedalign = false;
633 : }
634 :
635 :
636 : /* compute address to load data from */
637 : {
638 8503 : LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
639 :
640 8503 : v_attdatap =
641 8503 : l_gep(b, LLVMInt8TypeInContext(lc), v_tupdata_base, &v_off, 1, "");
642 : }
643 :
644 : /* compute address to store value at */
645 8503 : v_resultp = l_gep(b, TypeDatum, v_tts_values, &l_attno, 1, "");
646 :
647 : /* store null-byte (false) */
648 8503 : LLVMBuildStore(b, l_int8_const(lc, 0),
649 : l_gep(b, TypeStorageBool, v_tts_nulls, &l_attno, 1, ""));
650 :
651 : /*
652 : * Store datum. For byval: datums copy the value, extend to Datum's
653 : * width, and store. For byref types: store pointer to data.
654 : */
655 8503 : if (att->attbyval)
656 : {
657 : LLVMValueRef v_tmp_loaddata;
658 7056 : LLVMTypeRef vartype = LLVMIntTypeInContext(lc, att->attlen * 8);
659 7056 : LLVMTypeRef vartypep = LLVMPointerType(vartype, 0);
660 :
661 : v_tmp_loaddata =
662 7056 : LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
663 7056 : v_tmp_loaddata = l_load(b, vartype, v_tmp_loaddata, "attr_byval");
664 7056 : v_tmp_loaddata = LLVMBuildSExt(b, v_tmp_loaddata, TypeDatum, "");
665 :
666 7056 : LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
667 : }
668 : else
669 : {
670 : LLVMValueRef v_tmp_loaddata;
671 :
672 : /* store pointer */
673 : v_tmp_loaddata =
674 1447 : LLVMBuildPtrToInt(b,
675 : v_attdatap,
676 : TypeDatum,
677 : "attr_ptr");
678 1447 : LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
679 : }
680 :
681 : /* increment data pointer */
682 8503 : if (att->attlen > 0)
683 : {
684 7706 : v_incby = l_sizet_const(att->attlen);
685 : }
686 797 : else if (att->attlen == -1)
687 : {
688 797 : v_incby = l_call(b,
689 : llvm_pg_var_func_type("varsize_any"),
690 : llvm_pg_func(mod, "varsize_any"),
691 : &v_attdatap, 1,
692 : "varsize_any");
693 797 : l_callsite_ro(v_incby);
694 797 : l_callsite_alwaysinline(v_incby);
695 : }
696 0 : else if (att->attlen == -2)
697 : {
698 0 : v_incby = l_call(b,
699 : llvm_pg_var_func_type("strlen"),
700 : llvm_pg_func(mod, "strlen"),
701 : &v_attdatap, 1, "strlen");
702 :
703 0 : l_callsite_ro(v_incby);
704 :
705 : /* add 1 for NUL byte */
706 0 : v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
707 : }
708 : else
709 : {
710 : Assert(false);
711 0 : v_incby = NULL; /* silence compiler */
712 : }
713 :
714 8503 : if (attguaranteedalign)
715 : {
716 : Assert(known_alignment >= 0);
717 1170 : LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
718 : }
719 : else
720 : {
721 7333 : LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
722 :
723 7333 : v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
724 7333 : LLVMBuildStore(b, v_off, v_offp);
725 : }
726 :
727 : /*
728 : * jump to next block, unless last possible column, or all desired
729 : * (available) attributes have been fetched.
730 : */
731 8503 : if (attnum + 1 == natts)
732 : {
733 : /* jump out */
734 3329 : LLVMBuildBr(b, b_out);
735 : }
736 : else
737 : {
738 5174 : LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
739 : }
740 : }
741 :
742 :
743 : /* build block that returns */
744 3329 : LLVMPositionBuilderAtEnd(b, b_out);
745 :
746 : {
747 3329 : LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
748 :
749 3329 : LLVMBuildStore(b, l_int16_const(lc, natts), v_nvalidp);
750 3329 : v_off = LLVMBuildTrunc(b, v_off, LLVMInt32TypeInContext(lc), "");
751 3329 : LLVMBuildStore(b, v_off, v_slotoffp);
752 3329 : LLVMBuildRetVoid(b);
753 : }
754 :
755 3329 : LLVMDisposeBuilder(b);
756 :
757 3329 : return v_deform_fn;
758 : }
|