moving -Olocal-temps to -O4 until the issues are solved
[xonotic/gmqcc.git] / ir.c
1 /*
2  * Copyright (C) 2012, 2013
3  *     Wolfgang Bumiller
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a copy of
6  * this software and associated documentation files (the "Software"), to deal in
7  * the Software without restriction, including without limitation the rights to
8  * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
9  * of the Software, and to permit persons to whom the Software is furnished to do
10  * so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in all
13  * copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21  * SOFTWARE.
22  */
23 #include <stdlib.h>
24 #include <string.h>
25 #include "gmqcc.h"
26 #include "ir.h"
27
28 /***********************************************************************
29  * Type sizes used at multiple points in the IR codegen
30  */
31
/* Human-readable names for each TYPE_* id, used in diagnostics and IR
 * dumps. Indexed by the TYPE_* enumeration; must stay in sync with it
 * (TYPE_COUNT entries, same order as the other per-type tables below).
 */
const char *type_name[TYPE_COUNT] = {
    "void",
    "string",
    "float",
    "vector",
    "entity",
    "field",
    "function",
    "pointer",
    "integer",
    "variant",
    "struct",
    "union",
    "array",

    "nil",
    "<no-expression>"
};
50
/* Number of global slots a value of each type occupies: vectors (and
 * the variant type) take 3, scalars take 1, and aggregate types are 0
 * here because their size depends on their members. Indexed by TYPE_*.
 */
size_t type_sizeof_[TYPE_COUNT] = {
    1, /* TYPE_VOID     */
    1, /* TYPE_STRING   */
    1, /* TYPE_FLOAT    */
    3, /* TYPE_VECTOR   */
    1, /* TYPE_ENTITY   */
    1, /* TYPE_FIELD    */
    1, /* TYPE_FUNCTION */
    1, /* TYPE_POINTER  */
    1, /* TYPE_INTEGER  */
    3, /* TYPE_VARIANT  */
    0, /* TYPE_STRUCT   */
    0, /* TYPE_UNION    */
    0, /* TYPE_ARRAY    */
    0, /* TYPE_NIL      */
    0, /* TYPE_NOESPR   */
};
68
/* STORE instruction opcode used to copy a value of each type between
 * globals. Indexed by TYPE_*; types that cannot be stored directly
 * (aggregates, nil, no-expression) map to AINSTR_END as a sentinel.
 */
uint16_t type_store_instr[TYPE_COUNT] = {
    INSTR_STORE_F, /* should use I when having integer support */
    INSTR_STORE_S,
    INSTR_STORE_F,
    INSTR_STORE_V,
    INSTR_STORE_ENT,
    INSTR_STORE_FLD,
    INSTR_STORE_FNC,
    INSTR_STORE_ENT, /* should use I */
#if 0
    INSTR_STORE_I, /* integer type */
#else
    INSTR_STORE_F,
#endif

    INSTR_STORE_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
92
/* STORE instruction opcode used when the value being stored is itself
 * a field reference; everything stores as FLD except vector-typed
 * fields which need the 3-slot V store. Indexed by TYPE_*.
 */
uint16_t field_store_instr[TYPE_COUNT] = {
    INSTR_STORE_FLD,
    INSTR_STORE_FLD,
    INSTR_STORE_FLD,
    INSTR_STORE_V,
    INSTR_STORE_FLD,
    INSTR_STORE_FLD,
    INSTR_STORE_FLD,
    INSTR_STORE_FLD,
#if 0
    INSTR_STORE_FLD, /* integer type */
#else
    INSTR_STORE_FLD,
#endif

    INSTR_STORE_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
116
/* STOREP (store-through-pointer, i.e. entity field write) instruction
 * opcode for each type. Indexed by TYPE_*; unsupported types map to
 * the AINSTR_END sentinel.
 */
uint16_t type_storep_instr[TYPE_COUNT] = {
    INSTR_STOREP_F, /* should use I when having integer support */
    INSTR_STOREP_S,
    INSTR_STOREP_F,
    INSTR_STOREP_V,
    INSTR_STOREP_ENT,
    INSTR_STOREP_FLD,
    INSTR_STOREP_FNC,
    INSTR_STOREP_ENT, /* should use I */
#if 0
    INSTR_STOREP_ENT, /* integer type */
#else
    INSTR_STOREP_F,
#endif

    INSTR_STOREP_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
140
/* Equality-comparison instruction opcode for each type. Types with no
 * dedicated comparison (field) fall back to the entity compare;
 * unsupported types map to the AINSTR_END sentinel. Indexed by TYPE_*.
 */
uint16_t type_eq_instr[TYPE_COUNT] = {
    INSTR_EQ_F, /* should use I when having integer support */
    INSTR_EQ_S,
    INSTR_EQ_F,
    INSTR_EQ_V,
    INSTR_EQ_E,
    INSTR_EQ_E, /* FLD has no comparison */
    INSTR_EQ_FNC,
    INSTR_EQ_E, /* should use I */
#if 0
    INSTR_EQ_I,
#else
    INSTR_EQ_F,
#endif

    INSTR_EQ_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
164
/* Inequality-comparison instruction opcode for each type; mirrors
 * type_eq_instr entry-for-entry. Indexed by TYPE_*.
 */
uint16_t type_ne_instr[TYPE_COUNT] = {
    INSTR_NE_F, /* should use I when having integer support */
    INSTR_NE_S,
    INSTR_NE_F,
    INSTR_NE_V,
    INSTR_NE_E,
    INSTR_NE_E, /* FLD has no comparison */
    INSTR_NE_FNC,
    INSTR_NE_E, /* should use I */
#if 0
    INSTR_NE_I,
#else
    INSTR_NE_F,
#endif

    INSTR_NE_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
188
/* Logical-NOT instruction opcode for each type. Indexed by TYPE_*;
 * unsupported types map to the AINSTR_END sentinel.
 */
uint16_t type_not_instr[TYPE_COUNT] = {
    INSTR_NOT_F, /* should use I when having integer support */
    INSTR_NOT_S,
    INSTR_NOT_F,
    INSTR_NOT_V,
    INSTR_NOT_ENT,
    INSTR_NOT_ENT,
    INSTR_NOT_FNC,
    INSTR_NOT_ENT, /* should use I */
#if 0
    INSTR_NOT_I, /* integer type */
#else
    INSTR_NOT_F,
#endif

    INSTR_NOT_V, /* variant, should never be accessed */

    AINSTR_END, /* struct */
    AINSTR_END, /* union  */
    AINSTR_END, /* array  */
    AINSTR_END, /* nil    */
    AINSTR_END, /* noexpr */
};
212
213 /* protos */
214 static ir_value* ir_gen_extparam_proto(ir_builder *ir);
215 static void      ir_gen_extparam      (ir_builder *ir);
216
217 /* error functions */
218
219 static void irerror(lex_ctx ctx, const char *msg, ...)
220 {
221     va_list ap;
222     va_start(ap, msg);
223     con_cvprintmsg((void*)&ctx, LVL_ERROR, "internal error", msg, ap);
224     va_end(ap);
225 }
226
227 static bool irwarning(lex_ctx ctx, int warntype, const char *fmt, ...)
228 {
229     bool    r;
230     va_list ap;
231     va_start(ap, fmt);
232     r = vcompile_warning(ctx, warntype, fmt, ap);
233     va_end(ap);
234     return r;
235 }
236
237 /***********************************************************************
238  * Vector utility functions
239  */
240
241 bool GMQCC_WARN vec_ir_value_find(ir_value **vec, const ir_value *what, size_t *idx)
242 {
243     size_t i;
244     size_t len = vec_size(vec);
245     for (i = 0; i < len; ++i) {
246         if (vec[i] == what) {
247             if (idx) *idx = i;
248             return true;
249         }
250     }
251     return false;
252 }
253
254 bool GMQCC_WARN vec_ir_block_find(ir_block **vec, ir_block *what, size_t *idx)
255 {
256     size_t i;
257     size_t len = vec_size(vec);
258     for (i = 0; i < len; ++i) {
259         if (vec[i] == what) {
260             if (idx) *idx = i;
261             return true;
262         }
263     }
264     return false;
265 }
266
267 bool GMQCC_WARN vec_ir_instr_find(ir_instr **vec, ir_instr *what, size_t *idx)
268 {
269     size_t i;
270     size_t len = vec_size(vec);
271     for (i = 0; i < len; ++i) {
272         if (vec[i] == what) {
273             if (idx) *idx = i;
274             return true;
275         }
276     }
277     return false;
278 }
279
280 /***********************************************************************
281  * IR Builder
282  */
283
284 static void ir_block_delete_quick(ir_block* self);
285 static void ir_instr_delete_quick(ir_instr *self);
286 static void ir_function_delete_quick(ir_function *self);
287
288 ir_builder* ir_builder_new(const char *modulename)
289 {
290     ir_builder* self;
291
292     self = (ir_builder*)mem_a(sizeof(*self));
293     if (!self)
294         return NULL;
295
296     self->functions   = NULL;
297     self->globals     = NULL;
298     self->fields      = NULL;
299     self->filenames   = NULL;
300     self->filestrings = NULL;
301     self->htglobals   = util_htnew(IR_HT_SIZE);
302     self->htfields    = util_htnew(IR_HT_SIZE);
303     self->htfunctions = util_htnew(IR_HT_SIZE);
304
305     self->extparams       = NULL;
306     self->extparam_protos = NULL;
307
308     self->first_common_globaltemp = 0;
309     self->max_globaltemps         = 0;
310     self->first_common_local      = 0;
311     self->max_locals              = 0;
312
313     self->str_immediate = 0;
314     self->name = NULL;
315     if (!ir_builder_set_name(self, modulename)) {
316         mem_d(self);
317         return NULL;
318     }
319
320     self->nil = ir_value_var("nil", store_value, TYPE_NIL);
321     self->nil->cvq = CV_CONST;
322
323     return self;
324 }
325
326 void ir_builder_delete(ir_builder* self)
327 {
328     size_t i;
329     util_htdel(self->htglobals);
330     util_htdel(self->htfields);
331     util_htdel(self->htfunctions);
332     mem_d((void*)self->name);
333     for (i = 0; i != vec_size(self->functions); ++i) {
334         ir_function_delete_quick(self->functions[i]);
335     }
336     vec_free(self->functions);
337     for (i = 0; i != vec_size(self->extparams); ++i) {
338         ir_value_delete(self->extparams[i]);
339     }
340     vec_free(self->extparams);
341     for (i = 0; i != vec_size(self->globals); ++i) {
342         ir_value_delete(self->globals[i]);
343     }
344     vec_free(self->globals);
345     for (i = 0; i != vec_size(self->fields); ++i) {
346         ir_value_delete(self->fields[i]);
347     }
348     ir_value_delete(self->nil);
349     vec_free(self->fields);
350     vec_free(self->filenames);
351     vec_free(self->filestrings);
352     mem_d(self);
353 }
354
355 bool ir_builder_set_name(ir_builder *self, const char *name)
356 {
357     if (self->name)
358         mem_d((void*)self->name);
359     self->name = util_strdup(name);
360     return !!self->name;
361 }
362
363 ir_function* ir_builder_get_function(ir_builder *self, const char *name)
364 {
365     return (ir_function*)util_htget(self->htfunctions, name);
366 }
367
368 ir_function* ir_builder_create_function(ir_builder *self, const char *name, int outtype)
369 {
370     ir_function *fn = ir_builder_get_function(self, name);
371     if (fn) {
372         return NULL;
373     }
374
375     fn = ir_function_new(self, outtype);
376     if (!ir_function_set_name(fn, name))
377     {
378         ir_function_delete(fn);
379         return NULL;
380     }
381     vec_push(self->functions, fn);
382     util_htset(self->htfunctions, name, fn);
383
384     fn->value = ir_builder_create_global(self, fn->name, TYPE_FUNCTION);
385     if (!fn->value) {
386         ir_function_delete(fn);
387         return NULL;
388     }
389
390     fn->value->hasvalue = true;
391     fn->value->outtype = outtype;
392     fn->value->constval.vfunc = fn;
393     fn->value->context = fn->context;
394
395     return fn;
396 }
397
398 ir_value* ir_builder_get_global(ir_builder *self, const char *name)
399 {
400     return (ir_value*)util_htget(self->htglobals, name);
401 }
402
403 ir_value* ir_builder_create_global(ir_builder *self, const char *name, int vtype)
404 {
405     ir_value *ve;
406
407     if (name && name[0] != '#')
408     {
409         ve = ir_builder_get_global(self, name);
410         if (ve) {
411             return NULL;
412         }
413     }
414
415     ve = ir_value_var(name, store_global, vtype);
416     vec_push(self->globals, ve);
417     util_htset(self->htglobals, name, ve);
418     return ve;
419 }
420
421 ir_value* ir_builder_get_field(ir_builder *self, const char *name)
422 {
423     return (ir_value*)util_htget(self->htfields, name);
424 }
425
426
427 ir_value* ir_builder_create_field(ir_builder *self, const char *name, int vtype)
428 {
429     ir_value *ve = ir_builder_get_field(self, name);
430     if (ve) {
431         return NULL;
432     }
433
434     ve = ir_value_var(name, store_global, TYPE_FIELD);
435     ve->fieldtype = vtype;
436     vec_push(self->fields, ve);
437     util_htset(self->htfields, name, ve);
438     return ve;
439 }
440
441 /***********************************************************************
442  *IR Function
443  */
444
445 bool ir_function_naive_phi(ir_function*);
446 void ir_function_enumerate(ir_function*);
447 bool ir_function_calculate_liferanges(ir_function*);
448 bool ir_function_allocate_locals(ir_function*);
449
450 ir_function* ir_function_new(ir_builder* owner, int outtype)
451 {
452     ir_function *self;
453     self = (ir_function*)mem_a(sizeof(*self));
454
455     if (!self)
456         return NULL;
457
458     memset(self, 0, sizeof(*self));
459
460     self->name = NULL;
461     if (!ir_function_set_name(self, "<@unnamed>")) {
462         mem_d(self);
463         return NULL;
464     }
465     self->flags = 0;
466
467     self->owner = owner;
468     self->context.file = "<@no context>";
469     self->context.line = 0;
470     self->outtype = outtype;
471     self->value = NULL;
472     self->builtin = 0;
473
474     self->params = NULL;
475     self->blocks = NULL;
476     self->values = NULL;
477     self->locals = NULL;
478
479     self->code_function_def = -1;
480     self->allocated_locals = 0;
481     self->globaltemps      = 0;
482
483     self->run_id = 0;
484     return self;
485 }
486
487 bool ir_function_set_name(ir_function *self, const char *name)
488 {
489     if (self->name)
490         mem_d((void*)self->name);
491     self->name = util_strdup(name);
492     return !!self->name;
493 }
494
495 static void ir_function_delete_quick(ir_function *self)
496 {
497     size_t i;
498     mem_d((void*)self->name);
499
500     for (i = 0; i != vec_size(self->blocks); ++i)
501         ir_block_delete_quick(self->blocks[i]);
502     vec_free(self->blocks);
503
504     vec_free(self->params);
505
506     for (i = 0; i != vec_size(self->values); ++i)
507         ir_value_delete(self->values[i]);
508     vec_free(self->values);
509
510     for (i = 0; i != vec_size(self->locals); ++i)
511         ir_value_delete(self->locals[i]);
512     vec_free(self->locals);
513
514     /* self->value is deleted by the builder */
515
516     mem_d(self);
517 }
518
519 void ir_function_delete(ir_function *self)
520 {
521     size_t i;
522     mem_d((void*)self->name);
523
524     for (i = 0; i != vec_size(self->blocks); ++i)
525         ir_block_delete(self->blocks[i]);
526     vec_free(self->blocks);
527
528     vec_free(self->params);
529
530     for (i = 0; i != vec_size(self->values); ++i)
531         ir_value_delete(self->values[i]);
532     vec_free(self->values);
533
534     for (i = 0; i != vec_size(self->locals); ++i)
535         ir_value_delete(self->locals[i]);
536     vec_free(self->locals);
537
538     /* self->value is deleted by the builder */
539
540     mem_d(self);
541 }
542
/* Hand ownership of an SSA temporary to the function: it is appended
 * to self->values and will be freed by ir_function_delete[_quick].
 */
void ir_function_collect_value(ir_function *self, ir_value *v)
{
    vec_push(self->values, v);
}
547
548 ir_block* ir_function_create_block(lex_ctx ctx, ir_function *self, const char *label)
549 {
550     ir_block* bn = ir_block_new(self, label);
551     bn->context = ctx;
552     vec_push(self->blocks, bn);
553     return bn;
554 }
555
556 static bool instr_is_operation(uint16_t op)
557 {
558     return ( (op >= INSTR_MUL_F  && op <= INSTR_GT) ||
559              (op >= INSTR_LOAD_F && op <= INSTR_LOAD_FNC) ||
560              (op == INSTR_ADDRESS) ||
561              (op >= INSTR_NOT_F  && op <= INSTR_NOT_FNC) ||
562              (op >= INSTR_AND    && op <= INSTR_BITOR) ||
563              (op >= INSTR_CALL0  && op <= INSTR_CALL8) );
564 }
565
/* Peephole optimization pass over every block of the function.
 * Two rewrites are performed:
 *   1. "OP -> %tmp; STORE %tmp -> dst" becomes "OP -> dst" when %tmp
 *      is an SSA temporary read exactly once (by that store).
 *   2. "NOT x -> %tmp; COND %tmp" becomes "COND x" with the true/false
 *      branch targets swapped; repeated to collapse chained NOTs.
 * Returns false only when an internal sanity check fails.
 */
bool ir_function_pass_peephole(ir_function *self)
{
    size_t b;

    for (b = 0; b < vec_size(self->blocks); ++b) {
        size_t    i;
        ir_block *block = self->blocks[b];

        for (i = 0; i < vec_size(block->instr); ++i) {
            ir_instr *inst;
            inst = block->instr[i];

            if (i >= 1 &&
                (inst->opcode >= INSTR_STORE_F &&
                 inst->opcode <= INSTR_STORE_FNC))
            {
                ir_instr *store;
                ir_instr *oper;
                ir_value *value;

                store = inst;

                /* the candidate producer is the immediately preceding
                 * instruction in the same block */
                oper  = block->instr[i-1];
                if (!instr_is_operation(oper->opcode))
                    continue;

                value = oper->_ops[0];

                /* only do it for SSA values */
                if (value->store != store_value)
                    continue;

                /* don't optimize out the temp if it's used later again */
                if (vec_size(value->reads) != 1)
                    continue;

                /* The very next store must use this value */
                if (value->reads[0] != store)
                    continue;

                /* And of course the store must _read_ from it, so it's in
                 * OP 1 */
                if (store->_ops[1] != value)
                    continue;

                ++opts_optimizationcount[OPTIM_PEEPHOLE];
                /* retarget the producer to write straight into the
                 * store's destination, then drop the store */
                (void)!ir_instr_op(oper, 0, store->_ops[0], true);

                vec_remove(block->instr, i, 1);
                ir_instr_delete(store);
            }
            else if (inst->opcode == VINSTR_COND)
            {
                /* COND on a value resulting from a NOT could
                 * remove the NOT and swap its operands
                 */
                while (true) {
                    ir_block *tmp;
                    size_t    inotid;
                    ir_instr *inot;
                    ir_value *value;
                    value = inst->_ops[0];

                    /* the condition must be an SSA temp read only by
                     * this COND */
                    if (value->store != store_value ||
                        vec_size(value->reads) != 1 ||
                        value->reads[0] != inst)
                    {
                        break;
                    }

                    /* its single writer must be a usable NOT variant */
                    inot = value->writes[0];
                    if (inot->_ops[0] != value ||
                        inot->opcode < INSTR_NOT_F ||
                        inot->opcode > INSTR_NOT_FNC ||
                        inot->opcode == INSTR_NOT_V || /* can't do these */
                        inot->opcode == INSTR_NOT_S)
                    {
                        break;
                    }

                    /* count */
                    ++opts_optimizationcount[OPTIM_PEEPHOLE];
                    /* change operand */
                    (void)!ir_instr_op(inst, 0, inot->_ops[1], false);
                    /* remove NOT */
                    tmp = inot->owner;
                    for (inotid = 0; inotid < vec_size(tmp->instr); ++inotid) {
                        if (tmp->instr[inotid] == inot)
                            break;
                    }
                    if (inotid >= vec_size(tmp->instr)) {
                        compile_error(inst->context, "sanity-check failed: failed to find instruction to optimize out");
                        return false;
                    }
                    vec_remove(tmp->instr, inotid, 1);
                    ir_instr_delete(inot);
                    /* swap ontrue/onfalse */
                    tmp = inst->bops[0];
                    inst->bops[0] = inst->bops[1];
                    inst->bops[1] = tmp;
                }
                continue;
            }
        }
    }

    return true;
}
674
/* Tail-recursion elimination pass: rewrites a block ending in
 * "CALL self [; STORE %return -> %tmp] ; RETURN" into parameter
 * stores followed by a jump back to the entry block, removing the
 * call/return pair. Returns false if rebuilding the block fails.
 */
bool ir_function_pass_tailrecursion(ir_function *self)
{
    size_t b, p;

    for (b = 0; b < vec_size(self->blocks); ++b) {
        ir_value *funcval;
        ir_instr *ret, *call, *store = NULL;
        ir_block *block = self->blocks[b];

        /* only finalized blocks with at least CALL+RETURN qualify */
        if (!block->final || vec_size(block->instr) < 2)
            continue;

        ret = block->instr[vec_size(block->instr)-1];
        if (ret->opcode != INSTR_DONE && ret->opcode != INSTR_RETURN)
            continue;

        call = block->instr[vec_size(block->instr)-2];
        if (call->opcode >= INSTR_STORE_F && call->opcode <= INSTR_STORE_FNC) {
            /* account for the unoptimized
             * CALL
             * STORE %return, %tmp
             * RETURN %tmp
             * version
             */
            if (vec_size(block->instr) < 3)
                continue;

            store = call;
            call = block->instr[vec_size(block->instr)-3];
        }

        if (call->opcode < INSTR_CALL0 || call->opcode > INSTR_CALL8)
            continue;

        if (store) {
            /* optimize out the STORE */
            if (ret->_ops[0]   &&
                ret->_ops[0]   == store->_ops[0] &&
                store->_ops[1] == call->_ops[0])
            {
                ++opts_optimizationcount[OPTIM_PEEPHOLE];
                call->_ops[0] = store->_ops[0];
                vec_remove(block->instr, vec_size(block->instr) - 2, 1);
                ir_instr_delete(store);
            }
            else
                continue;
        }

        if (!call->_ops[0])
            continue;

        /* the callee must be this very function for it to be recursion */
        funcval = call->_ops[1];
        if (!funcval)
            continue;
        if (funcval->vtype != TYPE_FUNCTION || funcval->constval.vfunc != self)
            continue;

        /* now we have a CALL and a RET, check if it's a tailcall */
        if (ret->_ops[0] && call->_ops[0] != ret->_ops[0])
            continue;

        ++opts_optimizationcount[OPTIM_TAIL_RECURSION];
        vec_shrinkby(block->instr, 2);

        block->final = false; /* open it back up */

        /* emit parameter-stores */
        for (p = 0; p < vec_size(call->params); ++p) {
            /* assert(call->params_count <= self->locals_count); */
            if (!ir_block_create_store(block, call->context, self->locals[p], call->params[p])) {
                irerror(call->context, "failed to create tailcall store instruction for parameter %i", (int)p);
                return false;
            }
        }
        if (!ir_block_create_jump(block, call->context, self->blocks[0])) {
            irerror(call->context, "failed to create tailcall jump");
            return false;
        }

        ir_instr_delete(call);
        ir_instr_delete(ret);
    }

    return true;
}
761
762 bool ir_function_finalize(ir_function *self)
763 {
764     size_t i;
765
766     if (self->builtin)
767         return true;
768
769     if (OPTS_OPTIMIZATION(OPTIM_PEEPHOLE)) {
770         if (!ir_function_pass_peephole(self)) {
771             irerror(self->context, "generic optimization pass broke something in `%s`", self->name);
772             return false;
773         }
774     }
775
776     if (OPTS_OPTIMIZATION(OPTIM_TAIL_RECURSION)) {
777         if (!ir_function_pass_tailrecursion(self)) {
778             irerror(self->context, "tail-recursion optimization pass broke something in `%s`", self->name);
779             return false;
780         }
781     }
782
783     if (!ir_function_naive_phi(self)) {
784         irerror(self->context, "internal error: ir_function_naive_phi failed");
785         return false;
786     }
787
788     for (i = 0; i < vec_size(self->locals); ++i) {
789         ir_value *v = self->locals[i];
790         if (v->vtype == TYPE_VECTOR ||
791             (v->vtype == TYPE_FIELD && v->outtype == TYPE_VECTOR))
792         {
793             ir_value_vector_member(v, 0);
794             ir_value_vector_member(v, 1);
795             ir_value_vector_member(v, 2);
796         }
797     }
798     for (i = 0; i < vec_size(self->values); ++i) {
799         ir_value *v = self->values[i];
800         if (v->vtype == TYPE_VECTOR ||
801             (v->vtype == TYPE_FIELD && v->outtype == TYPE_VECTOR))
802         {
803             ir_value_vector_member(v, 0);
804             ir_value_vector_member(v, 1);
805             ir_value_vector_member(v, 2);
806         }
807     }
808
809     ir_function_enumerate(self);
810
811     if (!ir_function_calculate_liferanges(self))
812         return false;
813     if (!ir_function_allocate_locals(self))
814         return false;
815     return true;
816 }
817
818 ir_value* ir_function_create_local(ir_function *self, const char *name, int vtype, bool param)
819 {
820     ir_value *ve;
821
822     if (param &&
823         vec_size(self->locals) &&
824         self->locals[vec_size(self->locals)-1]->store != store_param) {
825         irerror(self->context, "cannot add parameters after adding locals");
826         return NULL;
827     }
828
829     ve = ir_value_var(name, (param ? store_param : store_local), vtype);
830     if (param)
831         ve->locked = true;
832     vec_push(self->locals, ve);
833     return ve;
834 }
835
836 /***********************************************************************
837  *IR Block
838  */
839
840 ir_block* ir_block_new(ir_function* owner, const char *name)
841 {
842     ir_block *self;
843     self = (ir_block*)mem_a(sizeof(*self));
844     if (!self)
845         return NULL;
846
847     memset(self, 0, sizeof(*self));
848
849     self->label = NULL;
850     if (name && !ir_block_set_label(self, name)) {
851         mem_d(self);
852         return NULL;
853     }
854     self->owner = owner;
855     self->context.file = "<@no context>";
856     self->context.line = 0;
857     self->final = false;
858
859     self->instr   = NULL;
860     self->entries = NULL;
861     self->exits   = NULL;
862
863     self->eid = 0;
864     self->is_return = false;
865     self->run_id = 0;
866
867     self->living = NULL;
868
869     self->generated = false;
870
871     return self;
872 }
873
874 static void ir_block_delete_quick(ir_block* self)
875 {
876     size_t i;
877     if (self->label) mem_d(self->label);
878     for (i = 0; i != vec_size(self->instr); ++i)
879         ir_instr_delete_quick(self->instr[i]);
880     vec_free(self->instr);
881     vec_free(self->entries);
882     vec_free(self->exits);
883     vec_free(self->living);
884     mem_d(self);
885 }
886
887 void ir_block_delete(ir_block* self)
888 {
889     size_t i;
890     if (self->label) mem_d(self->label);
891     for (i = 0; i != vec_size(self->instr); ++i)
892         ir_instr_delete(self->instr[i]);
893     vec_free(self->instr);
894     vec_free(self->entries);
895     vec_free(self->exits);
896     vec_free(self->living);
897     mem_d(self);
898 }
899
900 bool ir_block_set_label(ir_block *self, const char *name)
901 {
902     if (self->label)
903         mem_d((void*)self->label);
904     self->label = util_strdup(name);
905     return !!self->label;
906 }
907
908 /***********************************************************************
909  *IR Instructions
910  */
911
912 ir_instr* ir_instr_new(lex_ctx ctx, ir_block* owner, int op)
913 {
914     ir_instr *self;
915     self = (ir_instr*)mem_a(sizeof(*self));
916     if (!self)
917         return NULL;
918
919     self->owner = owner;
920     self->context = ctx;
921     self->opcode = op;
922     self->_ops[0] = NULL;
923     self->_ops[1] = NULL;
924     self->_ops[2] = NULL;
925     self->bops[0] = NULL;
926     self->bops[1] = NULL;
927
928     self->phi    = NULL;
929     self->params = NULL;
930
931     self->eid = 0;
932
933     self->likely = true;
934     return self;
935 }
936
/* Fast teardown used during whole-function deletion: frees only the
 * phi and parameter vectors without unlinking this instruction from
 * its operands' read/write lists (those values are being freed too).
 */
static void ir_instr_delete_quick(ir_instr *self)
{
    vec_free(self->phi);
    vec_free(self->params);
    mem_d(self);
}
943
/* Full teardown of an instruction: unlink it from the read/write
 * lists of every phi source, call parameter and operand before
 * freeing. The operand unlink goes through ir_instr_op(..., NULL, ...)
 * so the bookkeeping stays in one place.
 */
void ir_instr_delete(ir_instr *self)
{
    size_t i;
    /* The following calls can only delete from
     * vectors, we still want to delete this instruction
     * so ignore the return value. Since with the warn_unused_result attribute
     * gcc doesn't care about an explicit: (void)foo(); to ignore the result,
     * I have to improvise here and use if(foo());
     */
    for (i = 0; i < vec_size(self->phi); ++i) {
        size_t idx;
        if (vec_ir_instr_find(self->phi[i].value->writes, self, &idx))
            vec_remove(self->phi[i].value->writes, idx, 1);
        if (vec_ir_instr_find(self->phi[i].value->reads, self, &idx))
            vec_remove(self->phi[i].value->reads, idx, 1);
    }
    vec_free(self->phi);
    for (i = 0; i < vec_size(self->params); ++i) {
        size_t idx;
        if (vec_ir_instr_find(self->params[i]->writes, self, &idx))
            vec_remove(self->params[i]->writes, idx, 1);
        if (vec_ir_instr_find(self->params[i]->reads, self, &idx))
            vec_remove(self->params[i]->reads, idx, 1);
    }
    vec_free(self->params);
    /* clearing each operand also removes this instruction from that
     * operand's read/write list */
    (void)!ir_instr_op(self, 0, NULL, false);
    (void)!ir_instr_op(self, 1, NULL, false);
    (void)!ir_instr_op(self, 2, NULL, false);
    mem_d(self);
}
974
/* Set operand slot `op` (0..2) of an instruction to `v`, keeping the
 * value read/write cross-references consistent: the old operand's
 * reference to this instruction is removed, and the new operand gains
 * one in its writes (when `writing`) or reads list.
 * NOTE: if `writing` is true but this instruction is not in the old
 * operand's writes list, the else-branch removes a *reads* entry
 * instead — presumably intentional for operands whose role changed;
 * verify before relying on it. Always returns true.
 */
bool ir_instr_op(ir_instr *self, int op, ir_value *v, bool writing)
{
    if (self->_ops[op]) {
        size_t idx;
        if (writing && vec_ir_instr_find(self->_ops[op]->writes, self, &idx))
            vec_remove(self->_ops[op]->writes, idx, 1);
        else if (vec_ir_instr_find(self->_ops[op]->reads, self, &idx))
            vec_remove(self->_ops[op]->reads, idx, 1);
    }
    if (v) {
        if (writing)
            vec_push(v->writes, self);
        else
            vec_push(v->reads, self);
    }
    self->_ops[op] = v;
    return true;
}
993
994 /***********************************************************************
995  *IR Value
996  */
997
998 void ir_value_code_setaddr(ir_value *self, int32_t gaddr)
999 {
1000     self->code.globaladdr = gaddr;
1001     if (self->members[0]) self->members[0]->code.globaladdr = gaddr;
1002     if (self->members[1]) self->members[1]->code.globaladdr = gaddr;
1003     if (self->members[2]) self->members[2]->code.globaladdr = gaddr;
1004 }
1005
1006 int32_t ir_value_code_addr(const ir_value *self)
1007 {
1008     if (self->store == store_return)
1009         return OFS_RETURN + self->code.addroffset;
1010     return self->code.globaladdr + self->code.addroffset;
1011 }
1012
1013 ir_value* ir_value_var(const char *name, int storetype, int vtype)
1014 {
1015     ir_value *self;
1016     self = (ir_value*)mem_a(sizeof(*self));
1017     self->vtype = vtype;
1018     self->fieldtype = TYPE_VOID;
1019     self->outtype = TYPE_VOID;
1020     self->store = storetype;
1021
1022     self->reads  = NULL;
1023     self->writes = NULL;
1024
1025     self->cvq          = CV_NONE;
1026     self->hasvalue     = false;
1027     self->context.file = "<@no context>";
1028     self->context.line = 0;
1029     self->name = NULL;
1030     if (name && !ir_value_set_name(self, name)) {
1031         irerror(self->context, "out of memory");
1032         mem_d(self);
1033         return NULL;
1034     }
1035
1036     memset(&self->constval, 0, sizeof(self->constval));
1037     memset(&self->code,     0, sizeof(self->code));
1038
1039     self->members[0] = NULL;
1040     self->members[1] = NULL;
1041     self->members[2] = NULL;
1042     self->memberof = NULL;
1043
1044     self->unique_life = false;
1045     self->locked      = false;
1046     self->callparam   = false;
1047
1048     self->life = NULL;
1049     return self;
1050 }
1051
1052 ir_value* ir_value_vector_member(ir_value *self, unsigned int member)
1053 {
1054     char     *name;
1055     size_t    len;
1056     ir_value *m;
1057     if (member >= 3)
1058         return NULL;
1059
1060     if (self->members[member])
1061         return self->members[member];
1062
1063     if (self->name) {
1064         len = strlen(self->name);
1065         name = (char*)mem_a(len + 3);
1066         memcpy(name, self->name, len);
1067         name[len+0] = '_';
1068         name[len+1] = 'x' + member;
1069         name[len+2] = '\0';
1070     }
1071     else
1072         name = NULL;
1073
1074     if (self->vtype == TYPE_VECTOR)
1075     {
1076         m = ir_value_var(name, self->store, TYPE_FLOAT);
1077         if (name)
1078             mem_d(name);
1079         if (!m)
1080             return NULL;
1081         m->context = self->context;
1082
1083         self->members[member] = m;
1084         m->code.addroffset = member;
1085     }
1086     else if (self->vtype == TYPE_FIELD)
1087     {
1088         if (self->fieldtype != TYPE_VECTOR)
1089             return NULL;
1090         m = ir_value_var(name, self->store, TYPE_FIELD);
1091         if (name)
1092             mem_d(name);
1093         if (!m)
1094             return NULL;
1095         m->fieldtype = TYPE_FLOAT;
1096         m->context = self->context;
1097
1098         self->members[member] = m;
1099         m->code.addroffset = member;
1100     }
1101     else
1102     {
1103         irerror(self->context, "invalid member access on %s", self->name);
1104         return NULL;
1105     }
1106
1107     m->memberof = self;
1108     return m;
1109 }
1110
1111 static GMQCC_INLINE size_t ir_value_sizeof(const ir_value *self)
1112 {
1113     if (self->vtype == TYPE_FIELD && self->fieldtype == TYPE_VECTOR)
1114         return type_sizeof_[TYPE_VECTOR];
1115     return type_sizeof_[self->vtype];
1116 }
1117
1118 ir_value* ir_value_out(ir_function *owner, const char *name, int storetype, int vtype)
1119 {
1120     ir_value *v = ir_value_var(name, storetype, vtype);
1121     if (!v)
1122         return NULL;
1123     ir_function_collect_value(owner, v);
1124     return v;
1125 }
1126
1127 void ir_value_delete(ir_value* self)
1128 {
1129     size_t i;
1130     if (self->name)
1131         mem_d((void*)self->name);
1132     if (self->hasvalue)
1133     {
1134         if (self->vtype == TYPE_STRING)
1135             mem_d((void*)self->constval.vstring);
1136     }
1137     for (i = 0; i < 3; ++i) {
1138         if (self->members[i])
1139             ir_value_delete(self->members[i]);
1140     }
1141     vec_free(self->reads);
1142     vec_free(self->writes);
1143     vec_free(self->life);
1144     mem_d(self);
1145 }
1146
1147 bool ir_value_set_name(ir_value *self, const char *name)
1148 {
1149     if (self->name)
1150         mem_d((void*)self->name);
1151     self->name = util_strdup(name);
1152     return !!self->name;
1153 }
1154
1155 bool ir_value_set_float(ir_value *self, float f)
1156 {
1157     if (self->vtype != TYPE_FLOAT)
1158         return false;
1159     self->constval.vfloat = f;
1160     self->hasvalue = true;
1161     return true;
1162 }
1163
1164 bool ir_value_set_func(ir_value *self, int f)
1165 {
1166     if (self->vtype != TYPE_FUNCTION)
1167         return false;
1168     self->constval.vint = f;
1169     self->hasvalue = true;
1170     return true;
1171 }
1172
1173 bool ir_value_set_vector(ir_value *self, vector v)
1174 {
1175     if (self->vtype != TYPE_VECTOR)
1176         return false;
1177     self->constval.vvec = v;
1178     self->hasvalue = true;
1179     return true;
1180 }
1181
1182 bool ir_value_set_field(ir_value *self, ir_value *fld)
1183 {
1184     if (self->vtype != TYPE_FIELD)
1185         return false;
1186     self->constval.vpointer = fld;
1187     self->hasvalue = true;
1188     return true;
1189 }
1190
/* Duplicate a string, making sure even an empty string yields a real
 * one-byte allocation rather than whatever util_strdup does with "".
 */
static char *ir_strdup(const char *str)
{
    if (str && !*str) {
        char *empty = (char*)mem_a(1);
        empty[0] = '\0';
        return empty;
    }
    return util_strdup(str);
}
1201
1202 bool ir_value_set_string(ir_value *self, const char *str)
1203 {
1204     if (self->vtype != TYPE_STRING)
1205         return false;
1206     self->constval.vstring = ir_strdup(str);
1207     self->hasvalue = true;
1208     return true;
1209 }
1210
#if 0
/* Disabled: integer constants are not used by the current target;
 * kept for a potential future integer-capable instruction set.
 */
bool ir_value_set_int(ir_value *self, int i)
{
    if (self->vtype != TYPE_INTEGER)
        return false;
    self->constval.vint = i;
    self->hasvalue = true;
    return true;
}
#endif
1221
1222 bool ir_value_lives(ir_value *self, size_t at)
1223 {
1224     size_t i;
1225     for (i = 0; i < vec_size(self->life); ++i)
1226     {
1227         ir_life_entry_t *life = &self->life[i];
1228         if (life->start <= at && at <= life->end)
1229             return true;
1230         if (life->start > at) /* since it's ordered */
1231             return false;
1232     }
1233     return false;
1234 }
1235
1236 bool ir_value_life_insert(ir_value *self, size_t idx, ir_life_entry_t e)
1237 {
1238     size_t k;
1239     vec_push(self->life, e);
1240     for (k = vec_size(self->life)-1; k > idx; --k)
1241         self->life[k] = self->life[k-1];
1242     self->life[idx] = e;
1243     return true;
1244 }
1245
/* Merge the single instruction id 's' into the value's life ranges.
 * Ranges are kept sorted, disjoint, and inclusive on both ends.
 * Returns false if 's' is already covered by an existing range,
 * true if a range was extended, two ranges were joined, or a new
 * single-instruction range was inserted.
 */
bool ir_value_life_merge(ir_value *self, size_t s)
{
    size_t i;
    ir_life_entry_t *life = NULL;
    ir_life_entry_t *before = NULL;
    ir_life_entry_t new_entry;

    /* Find the first range >= s */
    for (i = 0; i < vec_size(self->life); ++i)
    {
        before = life;
        life = &self->life[i];
        if (life->start > s)
            break;
    }
    /* nothing found? append */
    if (i == vec_size(self->life)) {
        ir_life_entry_t e;
        /* here 'life' is the last range (or NULL if there are none) */
        if (life && life->end+1 == s)
        {
            /* previous life range can be merged in */
            life->end++;
            return true;
        }
        if (life && life->end >= s)
            return false;
        e.start = e.end = s;
        vec_push(self->life, e);
        return true;
    }
    /* found: 'life' is the first range starting after s,
     * 'before' the range preceding it (if any) */
    if (before)
    {
        if (before->end + 1 == s &&
            life->start - 1 == s)
        {
            /* merge: s exactly bridges the gap between both ranges */
            before->end = life->end;
            vec_remove(self->life, i, 1);
            return true;
        }
        if (before->end + 1 == s)
        {
            /* extend before */
            before->end++;
            return true;
        }
        /* already contained */
        if (before->end >= s)
            return false;
    }
    /* extend the found range downwards */
    if (life->start - 1 == s)
    {
        life->start--;
        return true;
    }
    /* insert a new entry */
    new_entry.start = new_entry.end = s;
    return ir_value_life_insert(self, i, new_entry);
}
1307
/* Union all life ranges of 'other' into 'self'.
 * Both vectors are sorted and disjoint; the result remains so, with
 * adjacent or overlapping ranges coalesced.
 */
bool ir_value_life_merge_into(ir_value *self, const ir_value *other)
{
    size_t i, myi;

    if (!vec_size(other->life))
        return true;

    /* empty destination: a plain copy of the source ranges suffices */
    if (!vec_size(self->life)) {
        size_t count = vec_size(other->life);
        ir_life_entry_t *life = vec_add(self->life, count);
        memcpy(life, other->life, count * sizeof(*life));
        return true;
    }

    myi = 0;
    for (i = 0; i < vec_size(other->life); ++i)
    {
        const ir_life_entry_t *life = &other->life[i];
        while (true)
        {
            ir_life_entry_t *entry = &self->life[myi];

            if (life->end+1 < entry->start)
            {
                /* adding an interval before entry */
                if (!ir_value_life_insert(self, myi, *life))
                    return false;
                ++myi;
                break;
            }

            if (life->start <  entry->start &&
                life->end+1 >= entry->start)
            {
                /* starts earlier and overlaps */
                entry->start = life->start;
            }

            if (life->end   >  entry->end &&
                life->start <= entry->end+1)
            {
                /* ends later and overlaps */
                entry->end = life->end;
            }

            /* see if our change combines it with the next ranges */
            while (myi+1 < vec_size(self->life) &&
                   entry->end+1 >= self->life[1+myi].start)
            {
                /* overlaps with (myi+1) */
                if (entry->end < self->life[1+myi].end)
                    entry->end = self->life[1+myi].end;
                vec_remove(self->life, myi+1, 1);
                /* vec_remove may shift storage; refresh the pointer */
                entry = &self->life[myi];
            }

            /* see if we're after the entry */
            if (life->start > entry->end)
            {
                ++myi;
                /* append if we're at the end */
                if (myi >= vec_size(self->life)) {
                    vec_push(self->life, *life);
                    break;
                }
                /* otherwise check the next range */
                continue;
            }
            break;
        }
    }
    return true;
}
1381
/* Determine whether the life ranges of two values ever overlap,
 * i.e. whether they may not share a storage slot.
 */
bool ir_values_overlap(const ir_value *a, const ir_value *b)
{
    /* For any life entry in A see if it overlaps with
     * any life entry in B.
     * Note that the life entries are ordered, so we can make a
     * more efficient algorithm there than naively translating the
     * statement above.
     */

    ir_life_entry_t *la, *lb, *enda, *endb;

    /* first of all, if either has no life range, they cannot clash */
    if (!vec_size(a->life) || !vec_size(b->life))
        return false;

    la = a->life;
    lb = b->life;
    enda = la + vec_size(a->life);
    endb = lb + vec_size(b->life);
    while (true)
    {
        /* check if the entries overlap, for that,
         * both must start before the other one ends.
         * NOTE(review): the comparisons are strict, so ranges that
         * merely touch (one ends exactly where the other starts) do
         * NOT count as overlapping -- confirm this is the intended
         * semantics for values handed off within one instruction.
         */
        if (la->start < lb->end &&
            lb->start < la->end)
        {
            return true;
        }

        /* entries are ordered
         * one entry is earlier than the other
         * that earlier entry will be moved forward
         */
        if (la->start < lb->start)
        {
            /* order: A B, move A forward
             * check if we hit the end with A
             */
            if (++la == enda)
                break;
        }
        else /* if (lb->start < la->start)  actually <= */
        {
            /* order: B A, move B forward
             * check if we hit the end with B
             */
            if (++lb == endb)
                break;
        }
    }
    return false;
}
1435
1436 /***********************************************************************
1437  *IR main operations
1438  */
1439
1440 static bool ir_check_unreachable(ir_block *self)
1441 {
1442     /* The IR should never have to deal with unreachable code */
1443     if (!self->final/* || OPTS_FLAG(ALLOW_UNREACHABLE_CODE)*/)
1444         return true;
1445     irerror(self->context, "unreachable statement (%s)", self->label);
1446     return false;
1447 }
1448
/* Emit a store instruction 'op' writing 'what' into 'target'.
 * For plain STORE_* instructions the target is recorded as written;
 * for STOREP_* instructions the target pointer is merely read.
 */
bool ir_block_create_store_op(ir_block *self, lex_ctx ctx, int op, ir_value *target, ir_value *what)
{
    ir_instr *in;
    if (!ir_check_unreachable(self))
        return false;

    /* only pointer-stores may target an SSA temporary */
    if (target->store == store_value &&
        (op < INSTR_STOREP_F || op > INSTR_STOREP_FNC))
    {
        irerror(self->context, "cannot store to an SSA value");
        irerror(self->context, "trying to store: %s <- %s", target->name, what->name);
        irerror(self->context, "instruction: %s", asm_instr[op].m);
        return false;
    }

    in = ir_instr_new(ctx, self, op);
    if (!in)
        return false;

    /* operand 0 counts as a write unless this is a STOREP_* op */
    if (!ir_instr_op(in, 0, target, (op < INSTR_STOREP_F || op > INSTR_STOREP_FNC)) ||
        !ir_instr_op(in, 1, what, false))
    {
        ir_instr_delete(in);
        return false;
    }
    vec_push(self->instr, in);
    return true;
}
1477
1478 bool ir_block_create_store(ir_block *self, lex_ctx ctx, ir_value *target, ir_value *what)
1479 {
1480     int op = 0;
1481     int vtype;
1482     if (target->vtype == TYPE_VARIANT)
1483         vtype = what->vtype;
1484     else
1485         vtype = target->vtype;
1486
1487 #if 0
1488     if      (vtype == TYPE_FLOAT   && what->vtype == TYPE_INTEGER)
1489         op = INSTR_CONV_ITOF;
1490     else if (vtype == TYPE_INTEGER && what->vtype == TYPE_FLOAT)
1491         op = INSTR_CONV_FTOI;
1492 #endif
1493         op = type_store_instr[vtype];
1494
1495     if (OPTS_FLAG(ADJUST_VECTOR_FIELDS)) {
1496         if (op == INSTR_STORE_FLD && what->fieldtype == TYPE_VECTOR)
1497             op = INSTR_STORE_V;
1498     }
1499
1500     return ir_block_create_store_op(self, ctx, op, target, what);
1501 }
1502
1503 bool ir_block_create_storep(ir_block *self, lex_ctx ctx, ir_value *target, ir_value *what)
1504 {
1505     int op = 0;
1506     int vtype;
1507
1508     if (target->vtype != TYPE_POINTER)
1509         return false;
1510
1511     /* storing using pointer - target is a pointer, type must be
1512      * inferred from source
1513      */
1514     vtype = what->vtype;
1515
1516     op = type_storep_instr[vtype];
1517     if (OPTS_FLAG(ADJUST_VECTOR_FIELDS)) {
1518         if (op == INSTR_STOREP_FLD && what->fieldtype == TYPE_VECTOR)
1519             op = INSTR_STOREP_V;
1520     }
1521
1522     return ir_block_create_store_op(self, ctx, op, target, what);
1523 }
1524
1525 bool ir_block_create_return(ir_block *self, lex_ctx ctx, ir_value *v)
1526 {
1527     ir_instr *in;
1528     if (!ir_check_unreachable(self))
1529         return false;
1530     self->final = true;
1531     self->is_return = true;
1532     in = ir_instr_new(ctx, self, INSTR_RETURN);
1533     if (!in)
1534         return false;
1535
1536     if (v && !ir_instr_op(in, 0, v, false)) {
1537         ir_instr_delete(in);
1538         return false;
1539     }
1540
1541     vec_push(self->instr, in);
1542     return true;
1543 }
1544
/* Emit a conditional branch on v: control continues in 'ontrue' or
 * 'onfalse'. Terminates the block and links the CFG edges both ways.
 */
bool ir_block_create_if(ir_block *self, lex_ctx ctx, ir_value *v,
                        ir_block *ontrue, ir_block *onfalse)
{
    ir_instr *in;
    if (!ir_check_unreachable(self))
        return false;
    self->final = true;
    /*in = ir_instr_new(ctx, self, (v->vtype == TYPE_STRING ? INSTR_IF_S : INSTR_IF_F));*/
    in = ir_instr_new(ctx, self, VINSTR_COND);
    if (!in)
        return false;

    if (!ir_instr_op(in, 0, v, false)) {
        ir_instr_delete(in);
        return false;
    }

    /* bops[0] is taken when v is true, bops[1] otherwise */
    in->bops[0] = ontrue;
    in->bops[1] = onfalse;

    vec_push(self->instr, in);

    /* maintain the CFG in both directions */
    vec_push(self->exits, ontrue);
    vec_push(self->exits, onfalse);
    vec_push(ontrue->entries,  self);
    vec_push(onfalse->entries, self);
    return true;
}
1573
1574 bool ir_block_create_jump(ir_block *self, lex_ctx ctx, ir_block *to)
1575 {
1576     ir_instr *in;
1577     if (!ir_check_unreachable(self))
1578         return false;
1579     self->final = true;
1580     in = ir_instr_new(ctx, self, VINSTR_JUMP);
1581     if (!in)
1582         return false;
1583
1584     in->bops[0] = to;
1585     vec_push(self->instr, in);
1586
1587     vec_push(self->exits, to);
1588     vec_push(to->entries, self);
1589     return true;
1590 }
1591
1592 bool ir_block_create_goto(ir_block *self, lex_ctx ctx, ir_block *to)
1593 {
1594     self->owner->flags |= IR_FLAG_HAS_GOTO;
1595     return ir_block_create_jump(self, ctx, to);
1596 }
1597
/* Create an (initially empty) PHI instruction producing a new SSA
 * value of type 'ot'; incoming edges are added via ir_phi_add.
 */
ir_instr* ir_block_create_phi(ir_block *self, lex_ctx ctx, const char *label, int ot)
{
    ir_value *out;
    ir_instr *in;
    if (!ir_check_unreachable(self))
        return NULL;
    in = ir_instr_new(ctx, self, VINSTR_PHI);
    if (!in)
        return NULL;
    /* the PHI's result value, owned (and later freed) by the function */
    out = ir_value_out(self->owner, label, store_value, ot);
    if (!out) {
        ir_instr_delete(in);
        return NULL;
    }
    if (!ir_instr_op(in, 0, out, true)) {
        ir_instr_delete(in);
        ir_value_delete(out);
        return NULL;
    }
    vec_push(self->instr, in);
    return in;
}
1620
1621 ir_value* ir_phi_value(ir_instr *self)
1622 {
1623     return self->_ops[0];
1624 }
1625
/* Add an incoming edge to a PHI: value v flows in when control arrives
 * from block b. b must be a predecessor of the PHI's block; anything
 * else indicates a broken AST and aborts.
 */
void ir_phi_add(ir_instr* self, ir_block *b, ir_value *v)
{
    ir_phi_entry_t pe;

    if (!vec_ir_block_find(self->owner->entries, b, NULL)) {
        /* Must not be possible to cause this, otherwise the AST
         * is doing something wrong.
         */
        irerror(self->context, "Invalid entry block for PHI");
        abort();
    }

    pe.value = v;
    pe.from = b;
    /* the PHI counts as a reader of v */
    vec_push(v->reads, self);
    vec_push(self->phi, pe);
}
1643
1644 /* call related code */
/* Create a call instruction; parameters are added afterwards via
 * ir_call_param. A noreturn call terminates the block like a return.
 * Returns the instruction (its result value is operand 0), or NULL.
 */
ir_instr* ir_block_create_call(ir_block *self, lex_ctx ctx, const char *label, ir_value *func, bool noreturn)
{
    ir_value *out;
    ir_instr *in;
    if (!ir_check_unreachable(self))
        return NULL;
    in = ir_instr_new(ctx, self, (noreturn ? VINSTR_NRCALL : INSTR_CALL0));
    if (!in)
        return NULL;
    if (noreturn) {
        self->final = true;
        self->is_return = true;
    }
    /* a void call's (unused) result stays in the return slot */
    out = ir_value_out(self->owner, label, (func->outtype == TYPE_VOID) ? store_return : store_value, func->outtype);
    if (!out) {
        ir_instr_delete(in);
        return NULL;
    }
    if (!ir_instr_op(in, 0, out, true) ||
        !ir_instr_op(in, 1, func, false))
    {
        ir_instr_delete(in);
        ir_value_delete(out);
        return NULL;
    }
    vec_push(self->instr, in);
    /*
    if (noreturn) {
        if (!ir_block_create_return(self, ctx, NULL)) {
            compile_error(ctx, "internal error: failed to generate dummy-return instruction");
            ir_instr_delete(in);
            return NULL;
        }
    }
    */
    return in;
}
1682
1683 ir_value* ir_call_value(ir_instr *self)
1684 {
1685     return self->_ops[0];
1686 }
1687
1688 void ir_call_param(ir_instr* self, ir_value *v)
1689 {
1690     vec_push(self->params, v);
1691     vec_push(v->reads, self);
1692 }
1693
1694 /* binary op related code */
1695
1696 ir_value* ir_block_create_binop(ir_block *self, lex_ctx ctx,
1697                                 const char *label, int opcode,
1698                                 ir_value *left, ir_value *right)
1699 {
1700     int ot = TYPE_VOID;
1701     switch (opcode) {
1702         case INSTR_ADD_F:
1703         case INSTR_SUB_F:
1704         case INSTR_DIV_F:
1705         case INSTR_MUL_F:
1706         case INSTR_MUL_V:
1707         case INSTR_AND:
1708         case INSTR_OR:
1709 #if 0
1710         case INSTR_AND_I:
1711         case INSTR_AND_IF:
1712         case INSTR_AND_FI:
1713         case INSTR_OR_I:
1714         case INSTR_OR_IF:
1715         case INSTR_OR_FI:
1716 #endif
1717         case INSTR_BITAND:
1718         case INSTR_BITOR:
1719 #if 0
1720         case INSTR_SUB_S: /* -- offset of string as float */
1721         case INSTR_MUL_IF:
1722         case INSTR_MUL_FI:
1723         case INSTR_DIV_IF:
1724         case INSTR_DIV_FI:
1725         case INSTR_BITOR_IF:
1726         case INSTR_BITOR_FI:
1727         case INSTR_BITAND_FI:
1728         case INSTR_BITAND_IF:
1729         case INSTR_EQ_I:
1730         case INSTR_NE_I:
1731 #endif
1732             ot = TYPE_FLOAT;
1733             break;
1734 #if 0
1735         case INSTR_ADD_I:
1736         case INSTR_ADD_IF:
1737         case INSTR_ADD_FI:
1738         case INSTR_SUB_I:
1739         case INSTR_SUB_FI:
1740         case INSTR_SUB_IF:
1741         case INSTR_MUL_I:
1742         case INSTR_DIV_I:
1743         case INSTR_BITAND_I:
1744         case INSTR_BITOR_I:
1745         case INSTR_XOR_I:
1746         case INSTR_RSHIFT_I:
1747         case INSTR_LSHIFT_I:
1748             ot = TYPE_INTEGER;
1749             break;
1750 #endif
1751         case INSTR_ADD_V:
1752         case INSTR_SUB_V:
1753         case INSTR_MUL_VF:
1754         case INSTR_MUL_FV:
1755 #if 0
1756         case INSTR_DIV_VF:
1757         case INSTR_MUL_IV:
1758         case INSTR_MUL_VI:
1759 #endif
1760             ot = TYPE_VECTOR;
1761             break;
1762 #if 0
1763         case INSTR_ADD_SF:
1764             ot = TYPE_POINTER;
1765             break;
1766 #endif
1767         default:
1768             /* ranges: */
1769             /* boolean operations result in floats */
1770             if (opcode >= INSTR_EQ_F && opcode <= INSTR_GT)
1771                 ot = TYPE_FLOAT;
1772             else if (opcode >= INSTR_LE && opcode <= INSTR_GT)
1773                 ot = TYPE_FLOAT;
1774 #if 0
1775             else if (opcode >= INSTR_LE_I && opcode <= INSTR_EQ_FI)
1776                 ot = TYPE_FLOAT;
1777 #endif
1778             break;
1779     };
1780     if (ot == TYPE_VOID) {
1781         /* The AST or parser were supposed to check this! */
1782         return NULL;
1783     }
1784
1785     return ir_block_create_general_instr(self, ctx, label, opcode, left, right, ot);
1786 }
1787
1788 ir_value* ir_block_create_unary(ir_block *self, lex_ctx ctx,
1789                                 const char *label, int opcode,
1790                                 ir_value *operand)
1791 {
1792     int ot = TYPE_FLOAT;
1793     switch (opcode) {
1794         case INSTR_NOT_F:
1795         case INSTR_NOT_V:
1796         case INSTR_NOT_S:
1797         case INSTR_NOT_ENT:
1798         case INSTR_NOT_FNC:
1799 #if 0
1800         case INSTR_NOT_I:
1801 #endif
1802             ot = TYPE_FLOAT;
1803             break;
1804         /* QC doesn't have other unary operations. We expect extensions to fill
1805          * the above list, otherwise we assume out-type = in-type, eg for an
1806          * unary minus
1807          */
1808         default:
1809             ot = operand->vtype;
1810             break;
1811     };
1812     if (ot == TYPE_VOID) {
1813         /* The AST or parser were supposed to check this! */
1814         return NULL;
1815     }
1816
1817     /* let's use the general instruction creator and pass NULL for OPB */
1818     return ir_block_create_general_instr(self, ctx, label, opcode, operand, NULL, ot);
1819 }
1820
1821 ir_value* ir_block_create_general_instr(ir_block *self, lex_ctx ctx, const char *label,
1822                                         int op, ir_value *a, ir_value *b, int outype)
1823 {
1824     ir_instr *instr;
1825     ir_value *out;
1826
1827     out = ir_value_out(self->owner, label, store_value, outype);
1828     if (!out)
1829         return NULL;
1830
1831     instr = ir_instr_new(ctx, self, op);
1832     if (!instr) {
1833         ir_value_delete(out);
1834         return NULL;
1835     }
1836
1837     if (!ir_instr_op(instr, 0, out, true) ||
1838         !ir_instr_op(instr, 1, a, false) ||
1839         !ir_instr_op(instr, 2, b, false) )
1840     {
1841         goto on_error;
1842     }
1843
1844     vec_push(self->instr, instr);
1845
1846     return out;
1847 on_error:
1848     ir_instr_delete(instr);
1849     ir_value_delete(out);
1850     return NULL;
1851 }
1852
1853 ir_value* ir_block_create_fieldaddress(ir_block *self, lex_ctx ctx, const char *label, ir_value *ent, ir_value *field)
1854 {
1855     ir_value *v;
1856
1857     /* Support for various pointer types todo if so desired */
1858     if (ent->vtype != TYPE_ENTITY)
1859         return NULL;
1860
1861     if (field->vtype != TYPE_FIELD)
1862         return NULL;
1863
1864     v = ir_block_create_general_instr(self, ctx, label, INSTR_ADDRESS, ent, field, TYPE_POINTER);
1865     v->fieldtype = field->fieldtype;
1866     return v;
1867 }
1868
/* Load a field's content from an entity, emitting the LOAD_* variant
 * matching the requested output type. Returns the loaded value or
 * NULL for invalid operand/output types.
 */
ir_value* ir_block_create_load_from_ent(ir_block *self, lex_ctx ctx, const char *label, ir_value *ent, ir_value *field, int outype)
{
    int op;
    if (ent->vtype != TYPE_ENTITY)
        return NULL;

    /* at some point we could redirect for TYPE_POINTER... but that could lead to carelessness */
    if (field->vtype != TYPE_FIELD)
        return NULL;

    switch (outype)
    {
        case TYPE_FLOAT:    op = INSTR_LOAD_F;   break;
        case TYPE_VECTOR:   op = INSTR_LOAD_V;   break;
        case TYPE_STRING:   op = INSTR_LOAD_S;   break;
        case TYPE_FIELD:    op = INSTR_LOAD_FLD; break;
        case TYPE_ENTITY:   op = INSTR_LOAD_ENT; break;
        case TYPE_FUNCTION: op = INSTR_LOAD_FNC; break;
#if 0
        case TYPE_POINTER: op = INSTR_LOAD_I;   break;
        case TYPE_INTEGER: op = INSTR_LOAD_I;   break;
#endif
        default:
            irerror(self->context, "invalid type for ir_block_create_load_from_ent: %s", type_name[outype]);
            return NULL;
    }

    return ir_block_create_general_instr(self, ctx, label, op, ent, field, outype);
}
1898
1899 /* PHI resolving breaks the SSA, and must thus be the last
1900  * step before life-range calculation.
1901  */
1902
1903 static bool ir_block_naive_phi(ir_block *self);
1904 bool ir_function_naive_phi(ir_function *self)
1905 {
1906     size_t i;
1907
1908     for (i = 0; i < vec_size(self->blocks); ++i)
1909     {
1910         if (!ir_block_naive_phi(self->blocks[i]))
1911             return false;
1912     }
1913     return true;
1914 }
1915
#if 0
/* Disabled alternative PHI-lowering helper: emits a store and moves it
 * up to position iid+1 in the block.
 * NOTE(review): this call to ir_block_create_store is missing the
 * lex_ctx argument the current signature requires -- it would not
 * compile if this #if 0 were removed as-is.
 */
static bool ir_naive_phi_emit_store(ir_block *block, size_t iid, ir_value *old, ir_value *what)
{
    ir_instr *instr;
    size_t i;

    /* create a store */
    if (!ir_block_create_store(block, old, what))
        return false;

    /* we now move it up */
    instr = vec_last(block->instr);
    for (i = vec_size(block->instr)-1; i > iid; --i)
        block->instr[i] = block->instr[i-1];
    block->instr[i] = instr;

    return true;
}
#endif
1935
/* Lower all PHI nodes in the block into explicit moves.
 * This breaks SSA form and must therefore be the last step before
 * life-range calculation (see the comment above this section).
 */
static bool ir_block_naive_phi(ir_block *self)
{
    size_t i, p; /*, w;*/
    /* FIXME: optionally, create_phi can add the phis
     * to a list so we don't need to loop through blocks
     * - anyway: "don't optimize YET"
     */
    for (i = 0; i < vec_size(self->instr); ++i)
    {
        ir_instr *instr = self->instr[i];
        if (instr->opcode != VINSTR_PHI)
            continue;

        vec_remove(self->instr, i, 1);
        --i; /* NOTE: i+1 below */

        for (p = 0; p < vec_size(instr->phi); ++p)
        {
            ir_value *v = instr->phi[p].value;
            ir_block *b = instr->phi[p].from;

            /* a single-write, single-read SSA temporary can simply be
             * redirected to write the PHI's output directly */
            if (v->store == store_value &&
                vec_size(v->reads) == 1 &&
                vec_size(v->writes) == 1)
            {
                /* replace the value */
                if (!ir_instr_op(v->writes[0], 0, instr->_ops[0], true))
                    return false;
            }
            else
            {
                /* force a move instruction */
                ir_instr *prevjump = vec_last(b->instr);
                vec_pop(b->instr);
                b->final = false;
                /* temporarily mark the output as a global so the store
                 * is not rejected as a write to an SSA value */
                instr->_ops[0]->store = store_global;
                if (!ir_block_create_store(b, instr->context, instr->_ops[0], v))
                    return false;
                instr->_ops[0]->store = store_value;
                /* re-append the source block's terminator after the store */
                vec_push(b->instr, prevjump);
                b->final = true;
            }

#if 0
            ir_value *v = instr->phi[p].value;
            for (w = 0; w < vec_size(v->writes); ++w) {
                ir_value *old;

                if (!v->writes[w]->_ops[0])
                    continue;

                /* When the write was to a global, we have to emit a mov */
                old = v->writes[w]->_ops[0];

                /* The original instruction now writes to the PHI target local */
                if (v->writes[w]->_ops[0] == v)
                    v->writes[w]->_ops[0] = instr->_ops[0];

                if (old->store != store_value && old->store != store_local && old->store != store_param)
                {
                    /* If it originally wrote to a global we need to store the value
                     * there as welli
                     */
                    if (!ir_naive_phi_emit_store(self, i+1, old, v))
                        return false;
                    if (i+1 < vec_size(self->instr))
                        instr = self->instr[i+1];
                    else
                        instr = NULL;
                    /* In case I forget and access instr later, it'll be NULL
                     * when it's a problem, to make sure we crash, rather than accessing
                     * invalid data.
                     */
                }
                else
                {
                    /* If it didn't, we can replace all reads by the phi target now. */
                    size_t r;
                    for (r = 0; r < vec_size(old->reads); ++r)
                    {
                        size_t op;
                        ir_instr *ri = old->reads[r];
                        for (op = 0; op < vec_size(ri->phi); ++op) {
                            if (ri->phi[op].value == old)
                                ri->phi[op].value = v;
                        }
                        for (op = 0; op < 3; ++op) {
                            if (ri->_ops[op] == old)
                                ri->_ops[op] = v;
                        }
                    }
                }
            }
#endif
        }
        ir_instr_delete(instr);
    }
    return true;
}
2035
2036 /***********************************************************************
2037  *IR Temp allocation code
2038  * Propagating value life ranges by walking through the function backwards
2039  * until no more changes are made.
2040  * In theory this should happen once more than once for every nested loop
2041  * level.
2042  * Though this implementation might run an additional time for if nests.
2043  */
2044
2045 /* Enumerate instructions used by value's life-ranges
2046  */
2047 static void ir_block_enumerate(ir_block *self, size_t *_eid)
2048 {
2049     size_t i;
2050     size_t eid = *_eid;
2051     for (i = 0; i < vec_size(self->instr); ++i)
2052     {
2053         self->instr[i]->eid = eid++;
2054     }
2055     *_eid = eid;
2056 }
2057
/* Enumerate blocks and instructions.
 * The block-enumeration is unordered!
 * We do not really use the block enumeration, however
 * the instruction enumeration is important for life-ranges.
 */
2063 void ir_function_enumerate(ir_function *self)
2064 {
2065     size_t i;
2066     size_t instruction_id = 1;
2067     for (i = 0; i < vec_size(self->blocks); ++i)
2068     {
2069         self->blocks[i]->eid = i;
2070         self->blocks[i]->run_id = 0;
2071         ir_block_enumerate(self->blocks[i], &instruction_id);
2072     }
2073 }
2074
2075 static bool ir_block_life_propagate(ir_block *b, ir_block *prev, bool *changed);
/* Compute the liferange of every value used in this function by walking
 * the CFG backwards from all returning blocks until a fixpoint is reached,
 * then warn about locals that appear to be read before being written.
 * Returns false only when a warning was promoted to an error (or
 * propagation failed).
 */
bool ir_function_calculate_liferanges(ir_function *self)
{
    size_t i, s;
    bool changed;

    /* parameters live at 0 */
    /* the first vec_size(self->params) entries of self->locals hold the
     * parameters, hence the index into locals with the params count */
    for (i = 0; i < vec_size(self->params); ++i)
        ir_value_life_merge(self->locals[i], 0);

    /* iterate until a full pass changes no liferange */
    do {
        self->run_id++;
        changed = false;
        for (i = 0; i != vec_size(self->blocks); ++i)
        {
            if (self->blocks[i]->is_return)
            {
                /* start each pass with a fresh living-set per return block */
                vec_free(self->blocks[i]->living);
                if (!ir_block_life_propagate(self->blocks[i], NULL, &changed))
                    return false;
            }
        }
    } while (changed);
    /* anything still 'living' at the entry block was read before any
     * write reached it: possibly uninitialized */
    if (vec_size(self->blocks)) {
        ir_block *block = self->blocks[0];
        for (i = 0; i < vec_size(block->living); ++i) {
            ir_value *v = block->living[i];
            if (v->store != store_local)
                continue;
            /* whole vectors are skipped; their members are checked
             * individually */
            if (v->vtype == TYPE_VECTOR)
                continue;
            self->flags |= IR_FLAG_HAS_UNINITIALIZED;
            /* find the instruction reading from it */
            for (s = 0; s < vec_size(v->reads); ++s) {
                if (v->reads[s]->eid == v->life[0].end)
                    break;
            }
            if (s < vec_size(v->reads)) {
                /* found a direct read: warn with its file/line context */
                if (irwarning(v->context, WARN_USED_UNINITIALIZED,
                              "variable `%s` may be used uninitialized in this function\n"
                              " -> %s:%i",
                              v->name,
                              v->reads[s]->context.file, v->reads[s]->context.line)
                   )
                {
                    return false;
                }
                continue;
            }
            /* the read may have gone through the enclosing vector instead */
            if (v->memberof) {
                ir_value *vec = v->memberof;
                for (s = 0; s < vec_size(vec->reads); ++s) {
                    if (vec->reads[s]->eid == v->life[0].end)
                        break;
                }
                if (s < vec_size(vec->reads)) {
                    if (irwarning(v->context, WARN_USED_UNINITIALIZED,
                                  "variable `%s` may be used uninitialized in this function\n"
                                  " -> %s:%i",
                                  v->name,
                                  vec->reads[s]->context.file, vec->reads[s]->context.line)
                       )
                    {
                        return false;
                    }
                    continue;
                }
            }
            /* no reading instruction located: warn without a use-site */
            if (irwarning(v->context, WARN_USED_UNINITIALIZED,
                          "variable `%s` may be used uninitialized in this function", v->name))
            {
                return false;
            }
        }
    }
    return true;
}
2152
2153 /* Local-value allocator
2154  * After finishing creating the liferange of all values used in a function
2155  * we can allocate their global-positions.
2156  * This is the counterpart to register-allocation in register machines.
2157  */
typedef struct {
    ir_value **locals;    /* one merged slot-value per allocated local slot */
    size_t    *sizes;     /* size (in globals) of each slot */
    size_t    *positions; /* final offset of each slot, filled in afterwards */
    bool      *unique;    /* slot reserved for a unique (non-shared) liferange */
} function_allocator;
2164
2165 static bool function_allocator_alloc(function_allocator *alloc, ir_value *var)
2166 {
2167     ir_value *slot;
2168     size_t vsize = ir_value_sizeof(var);
2169
2170     var->code.local = vec_size(alloc->locals);
2171
2172     slot = ir_value_var("reg", store_global, var->vtype);
2173     if (!slot)
2174         return false;
2175
2176     if (!ir_value_life_merge_into(slot, var))
2177         goto localerror;
2178
2179     vec_push(alloc->locals, slot);
2180     vec_push(alloc->sizes, vsize);
2181     vec_push(alloc->unique, var->unique_life);
2182
2183     return true;
2184
2185 localerror:
2186     ir_value_delete(slot);
2187     return false;
2188 }
2189
2190 static bool ir_function_allocator_assign(ir_function *self, function_allocator *alloc, ir_value *v)
2191 {
2192     size_t a;
2193     ir_value *slot;
2194
2195     for (a = 0; a < vec_size(alloc->locals); ++a)
2196     {
2197         /* if it's reserved for a unique liferange: skip */
2198         if (alloc->unique[a])
2199             continue;
2200
2201         slot = alloc->locals[a];
2202
2203         /* never resize parameters
2204          * will be required later when overlapping temps + locals
2205          */
2206         if (a < vec_size(self->params) &&
2207             alloc->sizes[a] < ir_value_sizeof(v))
2208         {
2209             continue;
2210         }
2211
2212         if (ir_values_overlap(v, slot))
2213             continue;
2214
2215         if (!ir_value_life_merge_into(slot, v))
2216             return false;
2217
2218         /* adjust size for this slot */
2219         if (alloc->sizes[a] < ir_value_sizeof(v))
2220             alloc->sizes[a] = ir_value_sizeof(v);
2221
2222         v->code.local = a;
2223         return true;
2224     }
2225     if (a >= vec_size(alloc->locals)) {
2226         if (!function_allocator_alloc(alloc, v))
2227             return false;
2228     }
2229     return true;
2230 }
2231
/* Assign final storage positions to all locals and temporaries of a
 * function. Two allocators are used: 'lockalloc' for values that must
 * live in the function's own local area (locked values, or everything
 * when -Oglobal-temps is off) and 'globalloc' for values that may share
 * the file-wide global-temp area. Also applies the CALL-stores
 * optimization (writing parameter temps directly into OFS_PARM* /
 * extparams, and reading return values directly from the return slot).
 * Uses goto-based cleanup; returns false on any failure.
 */
bool ir_function_allocate_locals(ir_function *self)
{
    size_t i;
    bool   retval = true;
    size_t pos;
    bool   opt_gt = OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS);

    ir_value *v;

    function_allocator lockalloc, globalloc;

    /* nothing to allocate */
    if (!vec_size(self->locals) && !vec_size(self->values))
        return true;

    globalloc.locals    = NULL;
    globalloc.sizes     = NULL;
    globalloc.positions = NULL;
    globalloc.unique    = NULL;
    lockalloc.locals    = NULL;
    lockalloc.sizes     = NULL;
    lockalloc.positions = NULL;
    lockalloc.unique    = NULL;

    /* First pass over locals: without -Olocal-temps every local gets a
     * locked, unique slot; with it, only the parameters (the leading
     * entries of self->locals) are handled here and locked. */
    for (i = 0; i < vec_size(self->locals); ++i)
    {
        v = self->locals[i];
        if (!OPTS_OPTIMIZATION(OPTIM_LOCAL_TEMPS)) {
            v->locked      = true;
            v->unique_life = true;
        }
        else if (i >= vec_size(self->params))
            break;
        else
            v->locked = true; /* lock parameters locals */
        if (!function_allocator_alloc((v->locked || !opt_gt ? &lockalloc : &globalloc), self->locals[i]))
            goto error;
    }
    /* Remaining (non-parameter) locals may share slots via the assign
     * logic; values without a liferange are never referenced. */
    for (; i < vec_size(self->locals); ++i)
    {
        v = self->locals[i];
        if (!vec_size(v->life))
            continue;
        if (!ir_function_allocator_assign(self, (v->locked || !opt_gt ? &lockalloc : &globalloc), v))
            goto error;
    }

    /* Allocate a slot for any value that still exists */
    for (i = 0; i < vec_size(self->values); ++i)
    {
        v = self->values[i];

        if (!vec_size(v->life))
            continue;

        /* CALL optimization:
         * If the value is a parameter-temp: 1 write, 1 read from a CALL
         * and it's not "locked", write it to the OFS_PARM directly.
         */
        if (OPTS_OPTIMIZATION(OPTIM_CALL_STORES) && !v->locked && !v->unique_life) {
            if (vec_size(v->reads) == 1 && vec_size(v->writes) == 1 &&
                (v->reads[0]->opcode == VINSTR_NRCALL ||
                 (v->reads[0]->opcode >= INSTR_CALL0 && v->reads[0]->opcode <= INSTR_CALL8)
                )
               )
            {
                size_t    param;
                ir_instr *call = v->reads[0];
                if (!vec_ir_value_find(call->params, v, &param)) {
                    irerror(call->context, "internal error: unlocked parameter %s not found", v->name);
                    goto error;
                }

                ++opts_optimizationcount[OPTIM_CALL_STORES];
                v->callparam = true;
                if (param < 8)
                    /* first 8 params go straight into the OFS_PARM globals
                     * (3 globals each, hence the factor) */
                    ir_value_code_setaddr(v, OFS_PARM0 + 3*param);
                else {
                    /* params beyond 8 use extparam prototype globals,
                     * created on demand */
                    ir_value *ep;
                    param -= 8;
                    if (vec_size(self->owner->extparam_protos) <= param)
                        ep = ir_gen_extparam_proto(self->owner);
                    else
                        ep = self->owner->extparam_protos[param];
                    ir_instr_op(v->writes[0], 0, ep, true);
                    call->params[param+8] = ep;
                }
                continue;
            }
            /* value written exactly once by a CALL: read it directly from
             * the VM's return slot instead of copying it out */
            if (vec_size(v->writes) == 1 && v->writes[0]->opcode == INSTR_CALL0)
            {
                v->store = store_return;
                if (v->members[0]) v->members[0]->store = store_return;
                if (v->members[1]) v->members[1]->store = store_return;
                if (v->members[2]) v->members[2]->store = store_return;
                ++opts_optimizationcount[OPTIM_CALL_STORES];
                continue;
            }
        }

        if (!ir_function_allocator_assign(self, (v->locked || !opt_gt ? &lockalloc : &globalloc), v))
            goto error;
    }

    if (!lockalloc.sizes && !globalloc.sizes) {
        goto cleanup;
    }
    /* seed position 0 for the first slot of each allocator */
    vec_push(lockalloc.positions, 0);
    vec_push(globalloc.positions, 0);

    /* Adjust slot positions based on sizes */
    /* Each slot starts where the previous one ended; the totals are
     * recorded in allocated_locals / globaltemps. */
    if (lockalloc.sizes) {
        pos = (vec_size(lockalloc.sizes) ? lockalloc.positions[0] : 0);
        for (i = 1; i < vec_size(lockalloc.sizes); ++i)
        {
            pos = lockalloc.positions[i-1] + lockalloc.sizes[i-1];
            vec_push(lockalloc.positions, pos);
        }
        self->allocated_locals = pos + vec_last(lockalloc.sizes);
    }
    if (globalloc.sizes) {
        pos = (vec_size(globalloc.sizes) ? globalloc.positions[0] : 0);
        for (i = 1; i < vec_size(globalloc.sizes); ++i)
        {
            pos = globalloc.positions[i-1] + globalloc.sizes[i-1];
            vec_push(globalloc.positions, pos);
        }
        self->globaltemps = pos + vec_last(globalloc.sizes);
    }

    /* Locals need to know their new position */
    /* translate slot index (code.local) into the slot's byte/global offset */
    for (i = 0; i < vec_size(self->locals); ++i) {
        v = self->locals[i];
        if (i >= vec_size(self->params) && !vec_size(v->life))
            continue;
        if (v->locked || !opt_gt)
            v->code.local = lockalloc.positions[v->code.local];
        else
            v->code.local = globalloc.positions[v->code.local];
    }
    /* Take over the actual slot positions on values */
    for (i = 0; i < vec_size(self->values); ++i) {
        v = self->values[i];
        if (!vec_size(v->life))
            continue;
        if (v->locked || !opt_gt)
            v->code.local = lockalloc.positions[v->code.local];
        else
            v->code.local = globalloc.positions[v->code.local];
    }

    goto cleanup;

error:
    retval = false;
cleanup:
    /* the slot values were only needed for liferange bookkeeping */
    for (i = 0; i < vec_size(lockalloc.locals); ++i)
        ir_value_delete(lockalloc.locals[i]);
    for (i = 0; i < vec_size(globalloc.locals); ++i)
        ir_value_delete(globalloc.locals[i]);
    vec_free(globalloc.unique);
    vec_free(globalloc.locals);
    vec_free(globalloc.sizes);
    vec_free(globalloc.positions);
    vec_free(lockalloc.unique);
    vec_free(lockalloc.locals);
    vec_free(lockalloc.sizes);
    vec_free(lockalloc.positions);
    return retval;
}
2401
2402 /* Get information about which operand
2403  * is read from, or written to.
2404  */
2405 static void ir_op_read_write(int op, size_t *read, size_t *write)
2406 {
2407     switch (op)
2408     {
2409     case VINSTR_JUMP:
2410     case INSTR_GOTO:
2411         *write = 0;
2412         *read = 0;
2413         break;
2414     case INSTR_IF:
2415     case INSTR_IFNOT:
2416 #if 0
2417     case INSTR_IF_S:
2418     case INSTR_IFNOT_S:
2419 #endif
2420     case INSTR_RETURN:
2421     case VINSTR_COND:
2422         *write = 0;
2423         *read = 1;
2424         break;
2425     case INSTR_STOREP_F:
2426     case INSTR_STOREP_V:
2427     case INSTR_STOREP_S:
2428     case INSTR_STOREP_ENT:
2429     case INSTR_STOREP_FLD:
2430     case INSTR_STOREP_FNC:
2431         *write = 0;
2432         *read  = 7;
2433         break;
2434     default:
2435         *write = 1;
2436         *read = 6;
2437         break;
2438     };
2439 }
2440
2441 static bool ir_block_living_add_instr(ir_block *self, size_t eid)
2442 {
2443     size_t i;
2444     bool changed = false;
2445     bool tempbool;
2446     for (i = 0; i != vec_size(self->living); ++i)
2447     {
2448         tempbool = ir_value_life_merge(self->living[i], eid);
2449         changed = changed || tempbool;
2450     }
2451     return changed;
2452 }
2453
2454 static bool ir_block_living_lock(ir_block *self)
2455 {
2456     size_t i;
2457     bool changed = false;
2458     for (i = 0; i != vec_size(self->living); ++i)
2459     {
2460         if (!self->living[i]->locked)
2461             changed = true;
2462         self->living[i]->locked = true;
2463     }
2464     return changed;
2465 }
2466
2467 static bool ir_block_life_prop_previous(ir_block* self, ir_block *prev, bool *changed)
2468 {
2469     size_t i;
2470
2471     (void)changed;
2472
2473     /* values which have been read in a previous iteration are now
2474      * in the "living" array even if the previous block doesn't use them.
2475      * So we have to remove whatever does not exist in the previous block.
2476      * They will be re-added on-read, but the liferange merge won't cause
2477      * a change.
2478     for (i = 0; i < vec_size(self->living); ++i)
2479     {
2480         if (!vec_ir_value_find(prev->living, self->living[i], NULL)) {
2481             vec_remove(self->living, i, 1);
2482             --i;
2483         }
2484     }
2485      */
2486
2487     /* Whatever the previous block still has in its living set
2488      * must now be added to ours as well.
2489      */
2490     for (i = 0; i < vec_size(prev->living); ++i)
2491     {
2492         if (vec_ir_value_find(self->living, prev->living[i], NULL))
2493             continue;
2494         vec_push(self->living, prev->living[i]);
2495         /*
2496         irerror(self->contextt from prev: %s", self->label, prev->living[i]->_name);
2497         */
2498     }
2499     return true;
2500 }
2501
/* Backwards liveness propagation for one block.
 * Walks the block's instructions from last to first, maintaining the
 * 'living' set: writes kill a value (SSA: each value is created exactly
 * once), reads (operands, PHI sources, call params) revive it, and every
 * living value's liferange is extended to cover the instruction.
 * Afterwards recurses into all predecessor blocks ('entries'), guarded by
 * run_id so each block is visited once per propagation pass.
 * *changed is set when any liferange was modified.
 */
static bool ir_block_life_propagate(ir_block *self, ir_block *prev, bool *changed)
{
    ir_instr *instr;
    ir_value *value;
    bool  tempbool;
    size_t i, o, p, mem;
    /* bitmasks which operands are read from or written to */
    size_t read, write;
    /* leftover debug scaffolding; unused */
    char dbg_ind[16];
    dbg_ind[0] = '#';
    dbg_ind[1] = '0';
    (void)dbg_ind;

    /* prev is the successor we were called from; inherit its living set */
    if (prev)
    {
        if (!ir_block_life_prop_previous(self, prev, changed))
            return false;
    }

    /* iterate instructions backwards */
    i = vec_size(self->instr);
    while (i)
    { --i;
        instr = self->instr[i];

        /* See which operands are read and write operands */
        ir_op_read_write(instr->opcode, &read, &write);

        /* MUL_VF/MUL_FV: the QCVM reads the float operand per component,
         * so it must stay alive one instruction longer — presumably to
         * keep it from being clobbered by the temp allocator; see the
         * codegen for these opcodes */
        if (instr->opcode == INSTR_MUL_VF)
        {
            /* the float source will get an additional lifetime */
            tempbool = ir_value_life_merge(instr->_ops[2], instr->eid+1);
            *changed = *changed || tempbool;
        }
        else if (instr->opcode == INSTR_MUL_FV)
        {
            /* the float source will get an additional lifetime */
            tempbool = ir_value_life_merge(instr->_ops[1], instr->eid+1);
            *changed = *changed || tempbool;
        }

        /* Go through the 3 main operands
         * writes first, then reads
         */
        for (o = 0; o < 3; ++o)
        {
            if (!instr->_ops[o]) /* no such operand */
                continue;

            value = instr->_ops[o];

            /* We only care about locals */
            /* we also calculate parameter liferanges so that locals
             * can take up parameter slots */
            if (value->store != store_value &&
                value->store != store_local &&
                value->store != store_param)
                continue;

            /* write operands */
            /* When we write to a local, we consider it "dead" for the
             * remaining upper part of the function, since in SSA a value
             * can only be written once (== created)
             */
            if (write & (1<<o))
            {
                size_t idx;
                bool in_living = vec_ir_value_find(self->living, value, &idx);
                if (!in_living)
                {
                    /* If the value isn't alive it hasn't been read before... */
                    /* TODO: See if the warning can be emitted during parsing or AST processing
                     * otherwise have warning printed here.
                     * IF printing a warning here: include filecontext_t,
                     * and make sure it's only printed once
                     * since this function is run multiple times.
                     */
                    /* con_err( "Value only written %s\n", value->name); */
                    tempbool = ir_value_life_merge(value, instr->eid);
                    *changed = *changed || tempbool;
                } else {
                    /* since 'living' won't contain it
                     * anymore, merge the value, since
                     * (A) doesn't.
                     */
                    tempbool = ir_value_life_merge(value, instr->eid);
                    *changed = *changed || tempbool;
                    /* Then remove */
                    vec_remove(self->living, idx, 1);
                }
                /* Removing a vector removes all members */
                for (mem = 0; mem < 3; ++mem) {
                    if (value->members[mem] && vec_ir_value_find(self->living, value->members[mem], &idx)) {
                        tempbool = ir_value_life_merge(value->members[mem], instr->eid);
                        *changed = *changed || tempbool;
                        vec_remove(self->living, idx, 1);
                    }
                }
                /* Removing the last member removes the vector */
                if (value->memberof) {
                    value = value->memberof;
                    for (mem = 0; mem < 3; ++mem) {
                        if (value->members[mem] && vec_ir_value_find(self->living, value->members[mem], NULL))
                            break;
                    }
                    /* mem == 3 means no member is still living */
                    if (mem == 3 && vec_ir_value_find(self->living, value, &idx)) {
                        tempbool = ir_value_life_merge(value, instr->eid);
                        *changed = *changed || tempbool;
                        vec_remove(self->living, idx, 1);
                    }
                }
            }
        }

        /* second operand pass: reads (after writes, so an operand used as
         * both source and destination is first killed, then revived) */
        for (o = 0; o < 3; ++o)
        {
            if (!instr->_ops[o]) /* no such operand */
                continue;

            value = instr->_ops[o];

            /* We only care about locals */
            /* we also calculate parameter liferanges so that locals
             * can take up parameter slots */
            if (value->store != store_value &&
                value->store != store_local &&
                value->store != store_param)
                continue;

            /* read operands */
            if (read & (1<<o))
            {
                if (!vec_ir_value_find(self->living, value, NULL))
                    vec_push(self->living, value);
                /* reading adds the full vector */
                if (value->memberof && !vec_ir_value_find(self->living, value->memberof, NULL))
                    vec_push(self->living, value->memberof);
                for (mem = 0; mem < 3; ++mem) {
                    if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], NULL))
                        vec_push(self->living, value->members[mem]);
                }
            }
        }
        /* PHI operands are always read operands */
        for (p = 0; p < vec_size(instr->phi); ++p)
        {
            value = instr->phi[p].value;
            if (!vec_ir_value_find(self->living, value, NULL))
                vec_push(self->living, value);
            /* reading adds the full vector */
            if (value->memberof && !vec_ir_value_find(self->living, value->memberof, NULL))
                vec_push(self->living, value->memberof);
            for (mem = 0; mem < 3; ++mem) {
                if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], NULL))
                    vec_push(self->living, value->members[mem]);
            }
        }

        /* on a call, all these values must be "locked" */
        if (instr->opcode >= INSTR_CALL0 && instr->opcode <= INSTR_CALL8) {
            if (ir_block_living_lock(self))
                *changed = true;
        }
        /* call params are read operands too */
        for (p = 0; p < vec_size(instr->params); ++p)
        {
            value = instr->params[p];
            if (!vec_ir_value_find(self->living, value, NULL))
                vec_push(self->living, value);
            /* reading adds the full vector */
            if (value->memberof && !vec_ir_value_find(self->living, value->memberof, NULL))
                vec_push(self->living, value->memberof);
            for (mem = 0; mem < 3; ++mem) {
                if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], NULL))
                    vec_push(self->living, value->members[mem]);
            }
        }

        /* (A) */
        /* extend every living value's range over this instruction */
        tempbool = ir_block_living_add_instr(self, instr->eid);
        /*con_err( "living added values\n");*/
        *changed = *changed || tempbool;

    }

    /* already visited in this propagation pass */
    if (self->run_id == self->owner->run_id)
        return true;

    self->run_id = self->owner->run_id;

    /* recurse into all predecessors */
    for (i = 0; i < vec_size(self->entries); ++i)
    {
        ir_block *entry = self->entries[i];
        ir_block_life_propagate(entry, self, changed);
    }

    return true;
}
2699
2700 /***********************************************************************
2701  *IR Code-Generation
2702  *
2703  * Since the IR has the convention of putting 'write' operands
2704  * at the beginning, we have to rotate the operands of instructions
2705  * properly in order to generate valid QCVM code.
2706  *
2707  * Having destinations at a fixed position is more convenient. In QC
2708  * this is *mostly* OPC,  but FTE adds at least 2 instructions which
2709  * read from from OPA,  and store to OPB rather than OPC.   Which is
2710  * partially the reason why the implementation of these instructions
2711  * in darkplaces has been delayed for so long.
2712  *
2713  * Breaking conventions is annoying...
2714  */
2715 static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal);
2716
2717 static bool gen_global_field(ir_value *global)
2718 {
2719     if (global->hasvalue)
2720     {
2721         ir_value *fld = global->constval.vpointer;
2722         if (!fld) {
2723             irerror(global->context, "Invalid field constant with no field: %s", global->name);
2724             return false;
2725         }
2726
2727         /* copy the field's value */
2728         ir_value_code_setaddr(global, vec_size(code_globals));
2729         vec_push(code_globals, fld->code.fieldaddr);
2730         if (global->fieldtype == TYPE_VECTOR) {
2731             vec_push(code_globals, fld->code.fieldaddr+1);
2732             vec_push(code_globals, fld->code.fieldaddr+2);
2733         }
2734     }
2735     else
2736     {
2737         ir_value_code_setaddr(global, vec_size(code_globals));
2738         vec_push(code_globals, 0);
2739         if (global->fieldtype == TYPE_VECTOR) {
2740             vec_push(code_globals, 0);
2741             vec_push(code_globals, 0);
2742         }
2743     }
2744     if (global->code.globaladdr < 0)
2745         return false;
2746     return true;
2747 }
2748
2749 static bool gen_global_pointer(ir_value *global)
2750 {
2751     if (global->hasvalue)
2752     {
2753         ir_value *target = global->constval.vpointer;
2754         if (!target) {
2755             irerror(global->context, "Invalid pointer constant: %s", global->name);
2756             /* NULL pointers are pointing to the NULL constant, which also
2757              * sits at address 0, but still has an ir_value for itself.
2758              */
2759             return false;
2760         }
2761
2762         /* Here, relocations ARE possible - in fteqcc-enhanced-qc:
2763          * void() foo; <- proto
2764          * void() *fooptr = &foo;
2765          * void() foo = { code }
2766          */
2767         if (!target->code.globaladdr) {
2768             /* FIXME: Check for the constant nullptr ir_value!
2769              * because then code.globaladdr being 0 is valid.
2770              */
2771             irerror(global->context, "FIXME: Relocation support");
2772             return false;
2773         }
2774
2775         ir_value_code_setaddr(global, vec_size(code_globals));
2776         vec_push(code_globals, target->code.globaladdr);
2777     }
2778     else
2779     {
2780         ir_value_code_setaddr(global, vec_size(code_globals));
2781         vec_push(code_globals, 0);
2782     }
2783     if (global->code.globaladdr < 0)
2784         return false;
2785     return true;
2786 }
2787
2788 static bool gen_blocks_recursive(ir_function *func, ir_block *block)
2789 {
2790     prog_section_statement stmt;
2791     ir_instr *instr;
2792     ir_block *target;
2793     ir_block *ontrue;
2794     ir_block *onfalse;
2795     size_t    stidx;
2796     size_t    i;
2797
2798 tailcall:
2799     block->generated = true;
2800     block->code_start = vec_size(code_statements);
2801     for (i = 0; i < vec_size(block->instr); ++i)
2802     {
2803         instr = block->instr[i];
2804
2805         if (instr->opcode == VINSTR_PHI) {
2806             irerror(block->context, "cannot generate virtual instruction (phi)");
2807             return false;
2808         }
2809
2810         if (instr->opcode == VINSTR_JUMP) {
2811             target = instr->bops[0];
2812             /* for uncoditional jumps, if the target hasn't been generated
2813              * yet, we generate them right here.
2814              */
2815             if (!target->generated) {
2816                 block = target;
2817                 goto tailcall;
2818             }
2819
2820             /* otherwise we generate a jump instruction */
2821             stmt.opcode = INSTR_GOTO;
2822             stmt.o1.s1 = (target->code_start) - vec_size(code_statements);
2823             stmt.o2.s1 = 0;
2824             stmt.o3.s1 = 0;
2825             if (stmt.o1.s1 != 1)
2826                 code_push_statement(&stmt, instr->context.line);
2827
2828             /* no further instructions can be in this block */
2829             return true;
2830         }
2831
2832         if (instr->opcode == VINSTR_COND) {
2833             ontrue  = instr->bops[0];
2834             onfalse = instr->bops[1];
2835             /* TODO: have the AST signal which block should
2836              * come first: eg. optimize IFs without ELSE...
2837              */
2838
2839             stmt.o1.u1 = ir_value_code_addr(instr->_ops[0]);
2840             stmt.o2.u1 = 0;
2841             stmt.o3.s1 = 0;
2842
2843             if (ontrue->generated) {
2844                 stmt.opcode = INSTR_IF;
2845                 stmt.o2.s1 = (ontrue->code_start) - vec_size(code_statements);
2846                 if (stmt.o2.s1 != 1)
2847                     code_push_statement(&stmt, instr->context.line);
2848             }
2849             if (onfalse->generated) {
2850                 stmt.opcode = INSTR_IFNOT;
2851                 stmt.o2.s1 = (onfalse->code_start) - vec_size(code_statements);
2852                 if (stmt.o2.s1 != 1)
2853                     code_push_statement(&stmt, instr->context.line);
2854             }
2855             if (!ontrue->generated) {
2856                 if (onfalse->generated) {
2857                     block = ontrue;
2858                     goto tailcall;
2859                 }
2860             }
2861             if (!onfalse->generated) {
2862                 if (ontrue->generated) {
2863                     block = onfalse;
2864                     goto tailcall;
2865                 }
2866             }
2867             /* neither ontrue nor onfalse exist */
2868             stmt.opcode = INSTR_IFNOT;
2869             if (!instr->likely) {
2870                 /* Honor the likelyhood hint */
2871                 ir_block *tmp = onfalse;
2872                 stmt.opcode = INSTR_IF;
2873                 onfalse = ontrue;
2874                 ontrue = tmp;
2875             }
2876             stidx = vec_size(code_statements);
2877             code_push_statement(&stmt, instr->context.line);
2878             /* on false we jump, so add ontrue-path */
2879             if (!gen_blocks_recursive(func, ontrue))
2880                 return false;
2881             /* fixup the jump address */
2882             code_statements[stidx].o2.s1 = vec_size(code_statements) - stidx;
2883             /* generate onfalse path */
2884             if (onfalse->generated) {
2885                 /* fixup the jump address */
2886                 code_statements[stidx].o2.s1 = (onfalse->code_start) - (stidx);
2887                 if (stidx+2 == vec_size(code_statements) && code_statements[stidx].o2.s1 == 1) {
2888                     code_statements[stidx] = code_statements[stidx+1];
2889                     if (code_statements[stidx].o1.s1 < 0)
2890                         code_statements[stidx].o1.s1++;
2891                     code_pop_statement();
2892                 }
2893                 stmt.opcode = vec_last(code_statements).opcode;
2894                 if (stmt.opcode == INSTR_GOTO ||
2895                     stmt.opcode == INSTR_IF ||
2896                     stmt.opcode == INSTR_IFNOT ||
2897                     stmt.opcode == INSTR_RETURN ||
2898                     stmt.opcode == INSTR_DONE)
2899                 {
2900                     /* no use jumping from here */
2901                     return true;
2902                 }
2903                 /* may have been generated in the previous recursive call */
2904                 stmt.opcode = INSTR_GOTO;
2905                 stmt.o1.s1 = (onfalse->code_start) - vec_size(code_statements);
2906                 stmt.o2.s1 = 0;
2907                 stmt.o3.s1 = 0;
2908                 if (stmt.o1.s1 != 1)
2909                     code_push_statement(&stmt, instr->context.line);
2910                 return true;
2911             }
2912             else if (stidx+2 == vec_size(code_statements) && code_statements[stidx].o2.s1 == 1) {
2913                 code_statements[stidx] = code_statements[stidx+1];
2914                 if (code_statements[stidx].o1.s1 < 0)
2915                     code_statements[stidx].o1.s1++;
2916                 code_pop_statement();
2917             }
2918             /* if not, generate now */
2919             block = onfalse;
2920             goto tailcall;
2921         }
2922
2923         if ( (instr->opcode >= INSTR_CALL0 && instr->opcode <= INSTR_CALL8)
2924            || instr->opcode == VINSTR_NRCALL)
2925         {
2926             size_t p, first;
2927             ir_value *retvalue;
2928
2929             first = vec_size(instr->params);
2930             if (first > 8)
2931                 first = 8;
2932             for (p = 0; p < first; ++p)
2933             {
2934                 ir_value *param = instr->params[p];
2935                 if (param->callparam)
2936                     continue;
2937
2938                 stmt.opcode = INSTR_STORE_F;
2939                 stmt.o3.u1 = 0;
2940
2941                 if (param->vtype == TYPE_FIELD)
2942                     stmt.opcode = field_store_instr[param->fieldtype];
2943                 else if (param->vtype == TYPE_NIL)
2944                     stmt.opcode = INSTR_STORE_V;
2945                 else
2946                     stmt.opcode = type_store_instr[param->vtype];
2947                 stmt.o1.u1 = ir_value_code_addr(param);
2948                 stmt.o2.u1 = OFS_PARM0 + 3 * p;
2949                 code_push_statement(&stmt, instr->context.line);
2950             }
2951             /* Now handle extparams */
2952             first = vec_size(instr->params);
2953             for (; p < first; ++p)
2954             {
2955                 ir_builder *ir = func->owner;
2956                 ir_value *param = instr->params[p];
2957                 ir_value *targetparam;
2958
2959                 if (param->callparam)
2960                     continue;
2961
2962                 if (p-8 >= vec_size(ir->extparams))
2963                     ir_gen_extparam(ir);
2964
2965                 targetparam = ir->extparams[p-8];
2966
2967                 stmt.opcode = INSTR_STORE_F;
2968                 stmt.o3.u1 = 0;
2969
2970                 if (param->vtype == TYPE_FIELD)
2971                     stmt.opcode = field_store_instr[param->fieldtype];
2972                 else if (param->vtype == TYPE_NIL)
2973                     stmt.opcode = INSTR_STORE_V;
2974                 else
2975                     stmt.opcode = type_store_instr[param->vtype];
2976                 stmt.o1.u1 = ir_value_code_addr(param);
2977                 stmt.o2.u1 = ir_value_code_addr(targetparam);
2978                 code_push_statement(&stmt, instr->context.line);
2979             }
2980
2981             stmt.opcode = INSTR_CALL0 + vec_size(instr->params);
2982             if (stmt.opcode > INSTR_CALL8)
2983                 stmt.opcode = INSTR_CALL8;
2984             stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
2985             stmt.o2.u1 = 0;
2986             stmt.o3.u1 = 0;
2987             code_push_statement(&stmt, instr->context.line);
2988
2989             retvalue = instr->_ops[0];
2990             if (retvalue && retvalue->store != store_return &&
2991                 (retvalue->store == store_global || vec_size(retvalue->life)))
2992             {
2993                 /* not to be kept in OFS_RETURN */
2994                 if (retvalue->vtype == TYPE_FIELD && OPTS_FLAG(ADJUST_VECTOR_FIELDS))
2995                     stmt.opcode = field_store_instr[retvalue->fieldtype];
2996                 else
2997                     stmt.opcode = type_store_instr[retvalue->vtype];
2998                 stmt.o1.u1 = OFS_RETURN;
2999                 stmt.o2.u1 = ir_value_code_addr(retvalue);
3000                 stmt.o3.u1 = 0;
3001                 code_push_statement(&stmt, instr->context.line);
3002             }
3003             continue;
3004         }
3005
3006         if (instr->opcode == INSTR_STATE) {
3007             irerror(block->context, "TODO: state instruction");
3008             return false;
3009         }
3010
3011         stmt.opcode = instr->opcode;
3012         stmt.o1.u1 = 0;
3013         stmt.o2.u1 = 0;
3014         stmt.o3.u1 = 0;
3015
3016         /* This is the general order of operands */
3017         if (instr->_ops[0])
3018             stmt.o3.u1 = ir_value_code_addr(instr->_ops[0]);
3019
3020         if (instr->_ops[1])
3021             stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
3022
3023         if (instr->_ops[2])
3024             stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
3025
3026         if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
3027         {
3028             stmt.o1.u1 = stmt.o3.u1;
3029             stmt.o3.u1 = 0;
3030         }
3031         else if ((stmt.opcode >= INSTR_STORE_F &&
3032                   stmt.opcode <= INSTR_STORE_FNC) ||
3033                  (stmt.opcode >= INSTR_STOREP_F &&
3034                   stmt.opcode <= INSTR_STOREP_FNC))
3035         {
3036             /* 2-operand instructions with A -> B */
3037             stmt.o2.u1 = stmt.o3.u1;
3038             stmt.o3.u1 = 0;
3039
3040             /* tiny optimization, don't output
3041              * STORE a, a
3042              */
3043             if (stmt.o2.u1 == stmt.o1.u1 &&
3044                 OPTS_OPTIMIZATION(OPTIM_PEEPHOLE))
3045             {
3046                 ++opts_optimizationcount[OPTIM_PEEPHOLE];
3047                 continue;
3048             }
3049         }
3050
3051         code_push_statement(&stmt, instr->context.line);
3052     }
3053     return true;
3054 }
3055
3056 static bool gen_function_code(ir_function *self)
3057 {
3058     ir_block *block;
3059     prog_section_statement stmt, *retst;
3060
3061     /* Starting from entry point, we generate blocks "as they come"
3062      * for now. Dead blocks will not be translated obviously.
3063      */
3064     if (!vec_size(self->blocks)) {
3065         irerror(self->context, "Function '%s' declared without body.", self->name);
3066         return false;
3067     }
3068
3069     block = self->blocks[0];
3070     if (block->generated)
3071         return true;
3072
3073     if (!gen_blocks_recursive(self, block)) {
3074         irerror(self->context, "failed to generate blocks for '%s'", self->name);
3075         return false;
3076     }
3077
3078     /* code_write and qcvm -disasm need to know that the function ends here */
3079     retst = &vec_last(code_statements);
3080     if (OPTS_OPTIMIZATION(OPTIM_VOID_RETURN) &&
3081         self->outtype == TYPE_VOID &&
3082         retst->opcode == INSTR_RETURN &&
3083         !retst->o1.u1 && !retst->o2.u1 && !retst->o3.u1)
3084     {
3085         retst->opcode = INSTR_DONE;
3086         ++opts_optimizationcount[OPTIM_VOID_RETURN];
3087     } else {
3088         stmt.opcode = INSTR_DONE;
3089         stmt.o1.u1 = 0;
3090         stmt.o2.u1 = 0;
3091         stmt.o3.u1 = 0;
3092         code_push_statement(&stmt, vec_last(code_linenums));
3093     }
3094     return true;
3095 }
3096
3097 static qcint ir_builder_filestring(ir_builder *ir, const char *filename)
3098 {
3099     /* NOTE: filename pointers are copied, we never strdup them,
3100      * thus we can use pointer-comparison to find the string.
3101      */
3102     size_t i;
3103     qcint  str;
3104
3105     for (i = 0; i < vec_size(ir->filenames); ++i) {
3106         if (ir->filenames[i] == filename)
3107             return ir->filestrings[i];
3108     }
3109
3110     str = code_genstring(filename);
3111     vec_push(ir->filenames, filename);
3112     vec_push(ir->filestrings, str);
3113     return str;
3114 }
3115
3116 static bool gen_global_function(ir_builder *ir, ir_value *global)
3117 {
3118     prog_section_function fun;
3119     ir_function          *irfun;
3120
3121     size_t i;
3122
3123     if (!global->hasvalue || (!global->constval.vfunc))
3124     {
3125         irerror(global->context, "Invalid state of function-global: not constant: %s", global->name);
3126         return false;
3127     }
3128
3129     irfun = global->constval.vfunc;
3130
3131     fun.name    = global->code.name;
3132     fun.file    = ir_builder_filestring(ir, global->context.file);
3133     fun.profile = 0; /* always 0 */
3134     fun.nargs   = vec_size(irfun->params);
3135     if (fun.nargs > 8)
3136         fun.nargs = 8;
3137
3138     for (i = 0;i < 8; ++i) {
3139         if ((int32_t)i >= fun.nargs)
3140             fun.argsize[i] = 0;
3141         else
3142             fun.argsize[i] = type_sizeof_[irfun->params[i]];
3143     }
3144
3145     fun.firstlocal = 0;
3146     fun.locals     = irfun->allocated_locals;
3147
3148     if (irfun->builtin)
3149         fun.entry = irfun->builtin+1;
3150     else {
3151         irfun->code_function_def = vec_size(code_functions);
3152         fun.entry = vec_size(code_statements);
3153     }
3154
3155     vec_push(code_functions, fun);
3156     return true;
3157 }
3158
3159 static ir_value* ir_gen_extparam_proto(ir_builder *ir)
3160 {
3161     ir_value *global;
3162     char      name[128];
3163
3164     snprintf(name, sizeof(name), "EXTPARM#%i", (int)(vec_size(ir->extparam_protos)+8));
3165     global = ir_value_var(name, store_global, TYPE_VECTOR);
3166
3167     vec_push(ir->extparam_protos, global);
3168     return global;
3169 }
3170
3171 static void ir_gen_extparam(ir_builder *ir)
3172 {
3173     prog_section_def def;
3174     ir_value        *global;
3175
3176     if (vec_size(ir->extparam_protos) < vec_size(ir->extparams)+1)
3177         global = ir_gen_extparam_proto(ir);
3178     else
3179         global = ir->extparam_protos[vec_size(ir->extparams)];
3180
3181     def.name = code_genstring(global->name);
3182     def.type = TYPE_VECTOR;
3183     def.offset = vec_size(code_globals);
3184
3185     vec_push(code_defs, def);
3186     ir_value_code_setaddr(global, def.offset);
3187     vec_push(code_globals, 0);
3188     vec_push(code_globals, 0);
3189     vec_push(code_globals, 0);
3190
3191     vec_push(ir->extparams, global);
3192 }
3193
3194 static bool gen_function_extparam_copy(ir_function *self)
3195 {
3196     size_t i, ext, numparams;
3197
3198     ir_builder *ir = self->owner;
3199     ir_value   *ep;
3200     prog_section_statement stmt;
3201
3202     numparams = vec_size(self->params);
3203     if (!numparams)
3204         return true;
3205
3206     stmt.opcode = INSTR_STORE_F;
3207     stmt.o3.s1 = 0;
3208     for (i = 8; i < numparams; ++i) {
3209         ext = i - 8;
3210         if (ext >= vec_size(ir->extparams))
3211             ir_gen_extparam(ir);
3212
3213         ep = ir->extparams[ext];
3214
3215         stmt.opcode = type_store_instr[self->locals[i]->vtype];
3216         if (self->locals[i]->vtype == TYPE_FIELD &&
3217             self->locals[i]->fieldtype == TYPE_VECTOR)
3218         {
3219             stmt.opcode = INSTR_STORE_V;
3220         }
3221         stmt.o1.u1 = ir_value_code_addr(ep);
3222         stmt.o2.u1 = ir_value_code_addr(self->locals[i]);
3223         code_push_statement(&stmt, self->context.line);
3224     }
3225
3226     return true;
3227 }
3228
/* Assign final global addresses to a function's locals and temps.
 *
 * Locals either get a private area at the end of code_globals (with -g,
 * without -Ooverlap-locals, or when the function forbids overlapping)
 * or share the builder's common local area. Unlocked temporaries may
 * additionally be placed in the shared global-temp area when
 * -Oglobal-temps is active.
 */
static bool gen_function_locals(ir_builder *ir, ir_value *global)
{
    prog_section_function *def;
    ir_function           *irfun;
    size_t                 i;
    uint32_t               firstlocal, firstglobal;

    irfun = global->constval.vfunc;
    def   = code_functions + irfun->code_function_def;

    /* choose the base address for this function's locals */
    if (opts.g || !OPTS_OPTIMIZATION(OPTIM_OVERLAP_LOCALS) || (irfun->flags & IR_FLAG_MASK_NO_OVERLAP))
        firstlocal = def->firstlocal = vec_size(code_globals);
    else {
        firstlocal = def->firstlocal = ir->first_common_local;
        ++opts_optimizationcount[OPTIM_OVERLAP_LOCALS];
    }

    /* base address for unlocked temporaries */
    firstglobal = (OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS) ? ir->first_common_globaltemp : firstlocal);

    /* grow code_globals so the local area actually exists */
    for (i = vec_size(code_globals); i < firstlocal + irfun->allocated_locals; ++i)
        vec_push(code_globals, 0);

    /* place declared locals; locked ones (or all, when global temps are
     * disabled) live in the local area and get a def emitted for them
     */
    for (i = 0; i < vec_size(irfun->locals); ++i) {
        ir_value *v = irfun->locals[i];
        if (v->locked || !OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS)) {
            ir_value_code_setaddr(v, firstlocal + v->code.local);
            if (!ir_builder_gen_global(ir, irfun->locals[i], true)) {
                irerror(irfun->locals[i]->context, "failed to generate local %s", irfun->locals[i]->name);
                return false;
            }
        }
        else
            ir_value_code_setaddr(v, firstglobal + v->code.local);
    }
    /* place the function's value temporaries (call parameters already
     * have their addresses and are skipped)
     */
    for (i = 0; i < vec_size(irfun->values); ++i)
    {
        ir_value *v = irfun->values[i];
        if (v->callparam)
            continue;
        if (v->locked)
            ir_value_code_setaddr(v, firstlocal + v->code.local);
        else
            ir_value_code_setaddr(v, firstglobal + v->code.local);
    }
    return true;
}
3274
/* Generate the body of one function-global: resolve its function-def,
 * lay out its locals, emit the extparam copies, then the block code.
 * Function pointers (no body) and builtins produce no code.
 */
static bool gen_global_function_code(ir_builder *ir, ir_value *global)
{
    prog_section_function *fundef;
    ir_function           *irfun;

    (void)ir;

    irfun = global->constval.vfunc;
    if (!irfun) {
        if (global->cvq == CV_NONE) {
            irwarning(global->context, WARN_IMPLICIT_FUNCTION_POINTER,
                      "function `%s` has no body and in QC implicitly becomes a function-pointer", global->name);
        }
        /* this was a function pointer, don't generate code for those */
        return true;
    }

    /* builtins are provided by the engine; nothing to emit */
    if (irfun->builtin)
        return true;

    /* gen_global_function must have assigned code_function_def */
    if (irfun->code_function_def < 0) {
        irerror(irfun->context, "`%s`: IR global wasn't generated, failed to access function-def", irfun->name);
        return false;
    }
    fundef = &code_functions[irfun->code_function_def];

    /* the function starts at the next statement to be emitted */
    fundef->entry = vec_size(code_statements);
    if (!gen_function_locals(ir, global)) {
        irerror(irfun->context, "Failed to generate locals for function %s", irfun->name);
        return false;
    }
    if (!gen_function_extparam_copy(irfun)) {
        irerror(irfun->context, "Failed to generate extparam-copy code for function %s", irfun->name);
        return false;
    }
    if (!gen_function_code(irfun)) {
        irerror(irfun->context, "Failed to generate code for function %s", irfun->name);
        return false;
    }
    return true;
}
3316
3317 static void gen_vector_defs(prog_section_def def, const char *name)
3318 {
3319     char  *component;
3320     size_t len, i;
3321
3322     if (!name || name[0] == '#' || OPTS_FLAG(SINGLE_VECTOR_DEFS))
3323         return;
3324
3325     def.type = TYPE_FLOAT;
3326
3327     len = strlen(name);
3328
3329     component = (char*)mem_a(len+3);
3330     memcpy(component, name, len);
3331     len += 2;
3332     component[len-0] = 0;
3333     component[len-2] = '_';
3334
3335     component[len-1] = 'x';
3336
3337     for (i = 0; i < 3; ++i) {
3338         def.name = code_genstring(component);
3339         vec_push(code_defs, def);
3340         def.offset++;
3341         component[len-1]++;
3342     }
3343 }
3344
3345 static void gen_vector_fields(prog_section_field fld, const char *name)
3346 {
3347     char  *component;
3348     size_t len, i;
3349
3350     if (!name || OPTS_FLAG(SINGLE_VECTOR_DEFS))
3351         return;
3352
3353     fld.type = TYPE_FLOAT;
3354
3355     len = strlen(name);
3356
3357     component = (char*)mem_a(len+3);
3358     memcpy(component, name, len);
3359     len += 2;
3360     component[len-0] = 0;
3361     component[len-2] = '_';
3362
3363     component[len-1] = 'x';
3364
3365     for (i = 0; i < 3; ++i) {
3366         fld.name = code_genstring(component);
3367         vec_push(code_fields, fld);
3368         fld.offset++;
3369         component[len-1]++;
3370     }
3371 }
3372
/* Emit one global into the output sections: its value data into
 * code_globals, optionally a def into code_defs (always for true
 * globals; for locals only when they get a name with -g), and for
 * functions the function entry itself via gen_global_function.
 * Returns false for unrepresentable types or placement failures.
 */
static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal)
{
    size_t           i;
    int32_t         *iptr;
    prog_section_def def;
    bool             pushdef = false;

    def.type   = global->vtype;
    def.offset = vec_size(code_globals);
    def.name   = 0;
    if (opts.g || !islocal)
    {
        pushdef = true;

        /* -Ostrip-constant-names: immediates and constants get no def */
        if (OPTS_OPTIMIZATION(OPTIM_STRIP_CONSTANT_NAMES) &&
            (global->name[0] == '#' || global->cvq == CV_CONST))
        {
            pushdef = false;
        }

        if (pushdef && global->name) {
            /* all immediates share a single "IMMEDIATE" name entry */
            if (global->name[0] == '#') {
                if (!self->str_immediate)
                    self->str_immediate = code_genstring("IMMEDIATE");
                def.name = global->code.name = self->str_immediate;
            }
            else
                def.name = global->code.name = code_genstring(global->name);
        }
        else
            def.name   = 0;
        if (islocal) {
            /* locals were already placed by gen_function_locals; only
             * the def (and vector component defs) is emitted here
             */
            def.offset = ir_value_code_addr(global);
            vec_push(code_defs, def);
            if (global->vtype == TYPE_VECTOR)
                gen_vector_defs(def, global->name);
            else if (global->vtype == TYPE_FIELD && global->fieldtype == TYPE_VECTOR)
                gen_vector_defs(def, global->name);
            return true;
        }
    }
    if (islocal)
        return true;

    switch (global->vtype)
    {
    case TYPE_VOID:
        if (!strcmp(global->name, "end_sys_globals")) {
            /* TODO: remember this point... all the defs before this one
             * should be checksummed and added to progdefs.h when we generate it.
             */
        }
        else if (!strcmp(global->name, "end_sys_fields")) {
            /* TODO: same as above but for entity-fields rather than globals
             */
        }
        else
            irwarning(global->context, WARN_VOID_VARIABLES, "unrecognized variable of type void `%s`",
                      global->name);
        /* I'd argue setting it to 0 is sufficient, but maybe some depend on knowing how far
         * the system fields actually go? Though the engine knows this anyway...
         * Maybe this could be an -foption
         * fteqcc creates data for end_sys_* - of size 1, so let's do the same
         */
        ir_value_code_setaddr(global, vec_size(code_globals));
        vec_push(code_globals, 0);
        /* Add the def */
        if (pushdef) vec_push(code_defs, def);
        return true;
    case TYPE_POINTER:
        if (pushdef) vec_push(code_defs, def);
        return gen_global_pointer(global);
    case TYPE_FIELD:
        if (pushdef) {
            vec_push(code_defs, def);
            if (global->fieldtype == TYPE_VECTOR)
                gen_vector_defs(def, global->name);
        }
        return gen_global_field(global);
    case TYPE_ENTITY:
        /* fall through */
    case TYPE_FLOAT:
    {
        /* single-word value: emit the constant bits or a zero slot */
        ir_value_code_setaddr(global, vec_size(code_globals));
        if (global->hasvalue) {
            iptr = (int32_t*)&global->constval.ivec[0];
            vec_push(code_globals, *iptr);
        } else {
            vec_push(code_globals, 0);
        }
        if (!islocal && global->cvq != CV_CONST)
            def.type |= DEF_SAVEGLOBAL;
        if (pushdef) vec_push(code_defs, def);

        return global->code.globaladdr >= 0;
    }
    case TYPE_STRING:
    {
        /* string constants are interned; the global holds the index */
        ir_value_code_setaddr(global, vec_size(code_globals));
        if (global->hasvalue) {
            vec_push(code_globals, code_genstring(global->constval.vstring));
        } else {
            vec_push(code_globals, 0);
        }
        if (!islocal && global->cvq != CV_CONST)
            def.type |= DEF_SAVEGLOBAL;
        if (pushdef) vec_push(code_defs, def);
        return global->code.globaladdr >= 0;
    }
    case TYPE_VECTOR:
    {
        /* multi-word value: emit all components (or zeroes) */
        size_t d;
        ir_value_code_setaddr(global, vec_size(code_globals));
        if (global->hasvalue) {
            iptr = (int32_t*)&global->constval.ivec[0];
            vec_push(code_globals, iptr[0]);
            if (global->code.globaladdr < 0)
                return false;
            for (d = 1; d < type_sizeof_[global->vtype]; ++d) {
                vec_push(code_globals, iptr[d]);
            }
        } else {
            vec_push(code_globals, 0);
            if (global->code.globaladdr < 0)
                return false;
            for (d = 1; d < type_sizeof_[global->vtype]; ++d) {
                vec_push(code_globals, 0);
            }
        }
        if (!islocal && global->cvq != CV_CONST)
            def.type |= DEF_SAVEGLOBAL;

        if (pushdef) {
            vec_push(code_defs, def);
            /* component defs must not carry the savegame flag */
            def.type &= ~DEF_SAVEGLOBAL;
            gen_vector_defs(def, global->name);
        }
        return global->code.globaladdr >= 0;
    }
    case TYPE_FUNCTION:
        /* the global's value is an index into code_functions */
        ir_value_code_setaddr(global, vec_size(code_globals));
        if (!global->hasvalue) {
            vec_push(code_globals, 0);
            if (global->code.globaladdr < 0)
                return false;
        } else {
            vec_push(code_globals, vec_size(code_functions));
            if (!gen_global_function(self, global))
                return false;
        }
        if (!islocal && global->cvq != CV_CONST)
            def.type |= DEF_SAVEGLOBAL;
        if (pushdef) vec_push(code_defs, def);
        return true;
    case TYPE_VARIANT:
        /* assume biggest type */
            ir_value_code_setaddr(global, vec_size(code_globals));
            vec_push(code_globals, 0);
            for (i = 1; i < type_sizeof_[TYPE_VARIANT]; ++i)
                vec_push(code_globals, 0);
            return true;
    default:
        /* refuse to create 'void' type or any other fancy business. */
        irerror(global->context, "Invalid type for global variable `%s`: %s",
                global->name, type_name[global->vtype]);
        return false;
    }
}
3541
3542 static void ir_builder_prepare_field(ir_value *field)
3543 {
3544     field->code.fieldaddr = code_alloc_field(type_sizeof_[field->fieldtype]);
3545 }
3546
/* Emit an entity field: a def for its companion global, the field
 * entry itself, and the global(s) holding the field offset.
 * Returns false when the field has no type or could not be placed.
 */
static bool ir_builder_gen_field(ir_builder *self, ir_value *field)
{
    prog_section_def def;
    prog_section_field fld;

    (void)self;

    def.type   = (uint16_t)field->vtype;
    def.offset = (uint16_t)vec_size(code_globals);

    /* create a global named the same as the field */
    if (opts.standard == COMPILER_GMQCC) {
        /* in our standard, the global gets a dot prefix */
        size_t len = strlen(field->name);
        char name[1024];

        /* we really don't want to have to allocate this, and 1024
         * bytes is more than enough for a variable/field name
         */
        if (len+2 >= sizeof(name)) {
            irerror(field->context, "invalid field name size: %u", (unsigned int)len);
            return false;
        }

        name[0] = '.';
        memcpy(name+1, field->name, len); /* no strncpy - we used strlen above */
        name[len+1] = 0;

        def.name = code_genstring(name);
        /* skipping the '.' gives the undotted field name, so the def's
         * string entry can double as the field's name
         */
        fld.name = def.name + 1; /* we reuse that string table entry */
    } else {
        /* in plain QC, there cannot be a global with the same name,
         * and so we also name the global the same.
         * FIXME: fteqcc should create a global as well
         * check if it actually uses the same name. Probably does
         */
        def.name = code_genstring(field->name);
        fld.name = def.name;
    }

    field->code.name = def.name;

    vec_push(code_defs, def);

    fld.type = field->fieldtype;

    if (fld.type == TYPE_VOID) {
        irerror(field->context, "field is missing a type: %s - don't know its size", field->name);
        return false;
    }

    fld.offset = field->code.fieldaddr;

    vec_push(code_fields, fld);

    /* the companion global's value is the field offset; vectors also
     * get the y and z component offsets in the following slots
     */
    ir_value_code_setaddr(field, vec_size(code_globals));
    vec_push(code_globals, fld.offset);
    if (fld.type == TYPE_VECTOR) {
        vec_push(code_globals, fld.offset+1);
        vec_push(code_globals, fld.offset+2);
    }

    /* vector fields additionally get per-component defs and fields */
    if (field->fieldtype == TYPE_VECTOR) {
        gen_vector_defs(def, field->name);
        gen_vector_fields(fld, field->name);
    }

    return field->code.globaladdr >= 0;
}
3616
/* Top-level code generation: place fields and globals, reserve the
 * shared temp/local areas, emit every function body, and finally write
 * the progs file (plus the .lno line-number file when -flno is set).
 * Returns false on any generation or write error.
 */
bool ir_builder_generate(ir_builder *self, const char *filename)
{
    prog_section_statement stmt;
    size_t i;
    char  *lnofile = NULL;

    code_init();

    /* assign field offsets before anything references them */
    for (i = 0; i < vec_size(self->fields); ++i)
    {
        ir_builder_prepare_field(self->fields[i]);
    }

    for (i = 0; i < vec_size(self->globals); ++i)
    {
        if (!ir_builder_gen_global(self, self->globals[i], false)) {
            return false;
        }
        if (self->globals[i]->vtype == TYPE_FUNCTION) {
            /* track the largest local and global-temp areas any
             * function needs so the shared areas below are big enough
             */
            ir_function *func = self->globals[i]->constval.vfunc;
            if (func && self->max_locals < func->allocated_locals &&
                !(func->flags & IR_FLAG_MASK_NO_OVERLAP))
            {
                self->max_locals = func->allocated_locals;
            }
            if (func && self->max_globaltemps < func->globaltemps)
                self->max_globaltemps = func->globaltemps;
        }
    }

    for (i = 0; i < vec_size(self->fields); ++i)
    {
        if (!ir_builder_gen_field(self, self->fields[i])) {
            return false;
        }
    }

    /* generate nil */
    ir_value_code_setaddr(self->nil, vec_size(code_globals));
    vec_push(code_globals, 0);
    vec_push(code_globals, 0);
    vec_push(code_globals, 0);

    /* generate global temps */
    self->first_common_globaltemp = vec_size(code_globals);
    for (i = 0; i < self->max_globaltemps; ++i) {
        vec_push(code_globals, 0);
    }
    /* generate common locals */
    self->first_common_local = vec_size(code_globals);
    for (i = 0; i < self->max_locals; ++i) {
        vec_push(code_globals, 0);
    }

    /* generate function code */
    for (i = 0; i < vec_size(self->globals); ++i)
    {
        if (self->globals[i]->vtype == TYPE_FUNCTION) {
            if (!gen_global_function_code(self, self->globals[i])) {
                return false;
            }
        }
    }

    /* the progs format stores global addresses in 16 bits */
    if (vec_size(code_globals) >= 65536) {
        irerror(vec_last(self->globals)->context, "This progs file would require more globals than the metadata can handle. Bailing out.");
        return false;
    }

    /* DP errors if the last instruction is not an INSTR_DONE. */
    if (vec_last(code_statements).opcode != INSTR_DONE)
    {
        stmt.opcode = INSTR_DONE;
        stmt.o1.u1 = 0;
        stmt.o2.u1 = 0;
        stmt.o3.u1 = 0;
        code_push_statement(&stmt, vec_last(code_linenums));
    }

    if (opts.pp_only)
        return true;

    if (vec_size(code_statements) != vec_size(code_linenums)) {
        con_err("Linecounter wrong: %lu != %lu\n",
                (unsigned long)vec_size(code_statements),
                (unsigned long)vec_size(code_linenums));
    } else if (OPTS_FLAG(LNO)) {
        /* derive the .lno filename by replacing the extension */
        char *dot;
        size_t filelen = strlen(filename);

        memcpy(vec_add(lnofile, filelen+1), filename, filelen+1);
        dot = strrchr(lnofile, '.');
        if (!dot) {
            vec_pop(lnofile);
        } else {
            vec_shrinkto(lnofile, dot - lnofile);
        }
        memcpy(vec_add(lnofile, 5), ".lno", 5);
    }

    if (!opts.quiet) {
        if (lnofile)
            con_out("writing '%s' and '%s'...\n", filename, lnofile);
        else
            con_out("writing '%s'\n", filename);
    }
    if (!code_write(filename, lnofile)) {
        vec_free(lnofile);
        return false;
    }
    vec_free(lnofile);
    return true;
}
3730
/***********************************************************************
 * IR debug dump functions
 */
3734
3735 #define IND_BUFSZ 1024
3736
3737 #ifdef _MSC_VER
3738 #   define strncat(dst, src, sz) strncat_s(dst, sz, src, _TRUNCATE)
3739 #endif
3740
3741 const char *qc_opname(int op)
3742 {
3743     if (op < 0) return "<INVALID>";
3744     if (op < (int)( sizeof(asm_instr) / sizeof(asm_instr[0]) ))
3745         return asm_instr[op].m;
3746     switch (op) {
3747         case VINSTR_PHI:  return "PHI";
3748         case VINSTR_JUMP: return "JUMP";
3749         case VINSTR_COND: return "COND";
3750         default:          return "<UNK>";
3751     }
3752 }
3753
3754 void ir_builder_dump(ir_builder *b, int (*oprintf)(const char*, ...))
3755 {
3756     size_t i;
3757     char indent[IND_BUFSZ];
3758     indent[0] = '\t';
3759     indent[1] = 0;
3760
3761     oprintf("module %s\n", b->name);
3762     for (i = 0; i < vec_size(b->globals); ++i)
3763     {
3764         oprintf("global ");
3765         if (b->globals[i]->hasvalue)
3766             oprintf("%s = ", b->globals[i]->name);
3767         ir_value_dump(b->globals[i], oprintf);
3768         oprintf("\n");
3769     }
3770     for (i = 0; i < vec_size(b->functions); ++i)
3771         ir_function_dump(b->functions[i], indent, oprintf);
3772     oprintf("endmodule %s\n", b->name);
3773 }
3774
3775 void ir_function_dump(ir_function *f, char *ind,
3776                       int (*oprintf)(const char*, ...))
3777 {
3778     size_t i;
3779     if (f->builtin != 0) {
3780         oprintf("%sfunction %s = builtin %i\n", ind, f->name, -f->builtin);
3781         return;
3782     }
3783     oprintf("%sfunction %s\n", ind, f->name);
3784     strncat(ind, "\t", IND_BUFSZ);
3785     if (vec_size(f->locals))
3786     {
3787         oprintf("%s%i locals:\n", ind, (int)vec_size(f->locals));
3788         for (i = 0; i < vec_size(f->locals); ++i) {
3789             oprintf("%s\t", ind);
3790             ir_value_dump(f->locals[i], oprintf);
3791             oprintf("\n");
3792         }
3793     }
3794     oprintf("%sliferanges:\n", ind);
3795     for (i = 0; i < vec_size(f->locals); ++i) {
3796         const char *attr = "";
3797         size_t l, m;
3798         ir_value *v = f->locals[i];
3799         if (v->unique_life && v->locked)
3800             attr = "unique,locked ";
3801         else if (v->unique_life)
3802             attr = "unique ";
3803         else if (v->locked)
3804             attr = "locked ";
3805         oprintf("%s\t%s: %s %s@%i ", ind, v->name, type_name[v->vtype], attr, (int)v->code.local);
3806         for (l = 0; l < vec_size(v->life); ++l) {
3807             oprintf("[%i,%i] ", v->life[l].start, v->life[l].end);
3808         }
3809         oprintf("\n");
3810         for (m = 0; m < 3; ++m) {
3811             ir_value *vm = v->members[m];
3812             if (!vm)
3813                 continue;
3814             if (vm->unique_life && vm->locked)
3815                 attr = "unique,locked ";
3816             else if (vm->unique_life)
3817                 attr = "unique ";
3818             else if (vm->locked)
3819                 attr = "locked ";
3820             oprintf("%s\t%s: %s@%i ", ind, vm->name, attr, (int)vm->code.local);
3821             for (l = 0; l < vec_size(vm->life); ++l) {
3822                 oprintf("[%i,%i] ", vm->life[l].start, vm->life[l].end);
3823             }
3824             oprintf("\n");
3825         }
3826     }
3827     for (i = 0; i < vec_size(f->values); ++i) {
3828         const char *attr = "";
3829         size_t l, m;
3830         ir_value *v = f->values[i];
3831         if (v->unique_life && v->locked)
3832             attr = "unique,locked ";
3833         else if (v->unique_life)
3834             attr = "unique ";
3835         else if (v->locked)
3836             attr = "locked ";
3837         oprintf("%s\t%s: %s %s@%i ", ind, v->name, type_name[v->vtype], attr, (int)v->code.local);
3838         for (l = 0; l < vec_size(v->life); ++l) {
3839             oprintf("[%i,%i] ", v->life[l].start, v->life[l].end);
3840         }
3841         oprintf("\n");
3842         for (m = 0; m < 3; ++m) {
3843             ir_value *vm = v->members[m];
3844             if (!vm)
3845                 continue;
3846             if (vm->unique_life && vm->locked)
3847                 attr = "unique,locked ";
3848             else if (vm->unique_life)
3849                 attr = "unique ";
3850             else if (vm->locked)
3851                 attr = "locked ";
3852             oprintf("%s\t%s: %s@%i ", ind, vm->name, attr, (int)vm->code.local);
3853             for (l = 0; l < vec_size(vm->life); ++l) {
3854                 oprintf("[%i,%i] ", vm->life[l].start, vm->life[l].end);
3855             }
3856             oprintf("\n");
3857         }
3858     }
3859     if (vec_size(f->blocks))