path: root/src/shader_recompiler/backend/glsl/reg_alloc.cpp
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <string>
#include <string_view>

#include <fmt/format.h>

#include "shader_recompiler/backend/glsl/reg_alloc.h"
#include "shader_recompiler/exception.h"
#include "shader_recompiler/frontend/ir/value.h"
#pragma optimize("", off)
namespace Shader::Backend::GLSL {
namespace {
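// Component names used when addressing individual elements of a register.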
constexpr std::string_view SWIZZLE = "xyzw";

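// Returns the GLSL identifier for an allocated register id.
// Condition codes and spilled registers are not yet supported.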
std::string Representation(Id id) {
    if (id.is_condition_code != 0) {
        throw NotImplementedException("Condition code");
    }
    if (id.is_spill != 0) {
        throw NotImplementedException("Spilling");
    }
    const u32 index{static_cast<u32>(id.index)};
    return fmt::format("R{}", index);
}

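// Formats an IR immediate value as a GLSL literal.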
std::string MakeImm(const IR::Value& value) {
    switch (value.Type()) {
    case IR::Type::U1:
        return fmt::format("{}", value.U1() ? "true" : "false");
    case IR::Type::U32:
        return fmt::format("{}", value.U32());
    case IR::Type::F32:
        return fmt::format("{}", value.F32());
    case IR::Type::U64:
        return fmt::format("{}", value.U64());
    case IR::Type::F64:
        return fmt::format("{}", value.F64());
    default:
        throw NotImplementedException("Immediate type {}", value.Type());
    }
}
} // Anonymous namespace

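// Allocates a register for the result of an instruction and returns its GLSL name.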
std::string RegAlloc::Define(IR::Inst& inst, u32 num_elements, u32 alignment) {
    const Id id{Alloc(num_elements, alignment)};
    inst.SetDefinition<Id>(id);
    return Representation(id);
}

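// Returns the GLSL expression for a value: a literal for immediates, otherwise the
// register holding the result of the defining instruction.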
std::string RegAlloc::Consume(const IR::Value& value) {
    return value.IsImmediate() ? MakeImm(value) : Consume(*value.InstRecursive());
}

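// Consumes one usage of an instruction's result, freeing its register once no usages remain.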
std::string RegAlloc::Consume(IR::Inst& inst) {
    const Id id{inst.Definition<Id>()};
    inst.DestructiveRemoveUsage();
    if (!inst.HasUses()) {
        Free(id);
    }
    return Representation(id);
}

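// Finds the first unused register, marks it as used and returns its id.
// Alignment is currently ignored and spilling is not implemented.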
Id RegAlloc::Alloc(u32 num_elements, [[maybe_unused]] u32 alignment) {
    for (size_t reg = 0; reg < NUM_REGS; ++reg) {
        if (register_use[reg]) {
            continue;
        }
        num_used_registers = std::max(num_used_registers, reg + 1);
        register_use[reg] = true;
        return Id{
            .base_element = 0,
            .num_elements_minus_one = num_elements - 1,
            .index = static_cast<u32>(reg),
            .is_spill = 0,
            .is_condition_code = 0,
        };
    }
    throw NotImplementedException("Register spilling");
}

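// Marks a register as free so it can be reused by later allocations.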
void RegAlloc::Free(Id id) {
    if (id.is_spill != 0) {
        throw NotImplementedException("Free spill");
    }
    register_use[id.index] = false;
}

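// Returns true when the instruction merely forwards its operand (identity or a
// bitcast between same-sized types) and therefore shares its operand's register.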
/*static*/ bool RegAlloc::IsAliased(const IR::Inst& inst) {
    switch (inst.GetOpcode()) {
    case IR::Opcode::Identity:
    case IR::Opcode::BitCastU16F16:
    case IR::Opcode::BitCastU32F32:
    case IR::Opcode::BitCastU64F64:
    case IR::Opcode::BitCastF16U16:
    case IR::Opcode::BitCastF32U32:
    case IR::Opcode::BitCastF64U64:
        return true;
    default:
        return false;
    }
}

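// Follows a chain of aliasing instructions and returns the instruction that actually
// produces the value, stopping early if an immediate operand is reached.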
/*static*/ IR::Inst& RegAlloc::AliasInst(IR::Inst& inst) {
    IR::Inst* it{&inst};
    while (IsAliased(*it)) {
        const IR::Value arg{it->Arg(0)};
        if (arg.IsImmediate()) {
            break;
        }
        it = arg.InstRecursive();
    }
    return *it;
}
} // namespace Shader::Backend::GLSL