/**
* @file
 * @brief Functions for picking random entries from weighted, graduated lists
**/
#pragma once
#include "random.h"
/// How an entry's chance of being picked varies across its [minr, maxr] range.
enum distrib_type
{
    FLAT, // full chance throughout the range
    SEMI, // 50% chance at range ends, 100% in the middle
    PEAK, // 0% chance just outside range ends, 100% in the middle, range
          // ends typically get ~20% or more
    RISE, // linearly from near-zero to 100%, increasing with depth
    FALL, // linearly from 100% at the start to near-zero
};
/// One weighted entry in a random-pick table: `value` is eligible when the
/// pick level falls in [minr, maxr], with base weight `rarity` shaped across
/// that range according to `distrib`.
template <typename T>
struct random_pick_entry
{
    int minr;             // lowest level (inclusive) at which the entry is valid
    int maxr;             // highest level (inclusive) at which the entry is valid
    int rarity;           // base weight, 0..1000
    distrib_type distrib; // how the weight varies between minr and maxr
    T value;              // the payload returned when this entry is picked
};
/// Generic weighted random picker over entries valid at a given level.
/// `max` bounds the number of entries that may be valid simultaneously
/// (it sizes the scratch array used by pick()). Subclass and override
/// veto() to exclude specific values from selection.
template <typename T, int max>
class random_picker
{
public:
    virtual ~random_picker();
    // One weighted roll over all entries valid at `level`; returns `none`
    // when no entry is eligible.
    T pick(const vector<random_pick_entry<T>>& weights, int level, T none);
    // Chance of `entry` being picked at `level`, expressed as a fraction
    // of `scale` (default: a percentage).
    int probability_at(T entry, const vector<random_pick_entry<T>>& weights,
                       int level, int scale = 100);
    // Effective weight of `pop` at `depth`, per its distribution shape.
    int rarity_at(const random_pick_entry<T>& pop,
                  int depth);
    // Hook for subclasses: return true to exclude a value from selection.
    virtual bool veto(T) { return false; }
};
// Out-of-line virtual destructor, defaulted: the class owns no resources;
// the destructor exists only so subclasses are destroyed correctly when
// deleted through a base pointer.
template <typename T, int max>
random_picker<T, max>::~random_picker() = default;
/// Perform one weighted random roll over all entries valid at `level`.
///
/// @param weights  the table of candidate entries
/// @param level    the depth/level to pick for
/// @param none     value to return when no entry is eligible
/// @return the picked value, or `none` if nothing is valid at this level.
template <typename T, int max>
T random_picker<T, max>::pick(const vector<random_pick_entry<T>>& weights, int level,
                              T none)
{
    // Collect every entry valid at this level, along with its effective
    // (distribution-adjusted) rarity.
    struct { T value; int rarity; } valid[max];
    int nvalid = 0;
    int totalrar = 0;
    for (const random_pick_entry<T>& pop : weights)
    {
        if (level < pop.minr || level > pop.maxr)
            continue;
        if (veto(pop.value))
            continue;
        int rar = rarity_at(pop, level);
        ASSERTM(rar > 0, "Rarity %d: %d at level %d", rar, pop.value, level);
        // Guard the fixed-size scratch array: more than `max` simultaneously
        // valid entries would overflow it.
        ASSERTM(nvalid < max, "too many valid entries at level %d", level);
        valid[nvalid].value = pop.value;
        valid[nvalid].rarity = rar;
        totalrar += rar;
        nvalid++;
    }
    if (!nvalid)
        return none;
    // Roll uniformly in [0, totalrar) and walk the entries until it lands;
    // each entry wins with probability rarity/totalrar.
    totalrar = random2(totalrar); // the roll!
    for (int i = 0; i < nvalid; i++)
        if ((totalrar -= valid[i].rarity) < 0)
            return valid[i].value;
    die("random_pick roll out of range");
}
/// Compute the chance of `entry` being chosen by pick() at `level`.
///
/// @param entry    the value whose probability is queried
/// @param weights  the same table that would be passed to pick()
/// @param level    the depth/level to evaluate at
/// @param scale    the result is a fraction of this (default 100 = percent)
/// @return entry's share of the total weight, scaled; 0 if nothing is valid.
template <typename T, int max>
int random_picker<T, max>::probability_at(T entry,
                                          const vector<random_pick_entry<T>>& weights,
                                          int level, int scale)
{
    int total = 0;
    int matching = 0;
    for (const random_pick_entry<T>& pop : weights)
    {
        const bool in_range = level >= pop.minr && level <= pop.maxr;
        if (!in_range || veto(pop.value))
            continue;
        const int rar = rarity_at(pop, level);
        ASSERTM(rar > 0, "Rarity %d: %d at level %d", rar, pop.value, level);
        total += rar;
        // An entry may appear multiple times in the table; sum all of them.
        if (pop.value == entry)
            matching += rar;
    }
    return total ? matching * scale / total : 0;
}
/// Compute the effective weight of `pop` at a given `depth`, applying its
/// distribution shape across [minr, maxr]. Callers must ensure depth is
/// inside the entry's range (pick() and probability_at() filter first).
template <typename T, int max>
int random_picker<T, max>::rarity_at(const random_pick_entry<T>& pop, int depth)
{
    // 2520 is divisible by any number 1..10, and provides enough scale
    // to make round-off errors even for degenerate distributions ok.
    int rar = pop.rarity * 2520;
    int len = pop.maxr - pop.minr;
    switch (pop.distrib)
    {
    case FLAT: // 100% everywhere
        return rar;
    case SEMI: // 100% in the middle, 50% at the edges
        // |minr + maxr - 2*depth| is 0 at the range's midpoint and grows to
        // `len` at either end; doubling len makes the edge factor 1/2.
        if (len)
            len *= 2;
        else
            len += 2; // a single-level range
        return rar * (len - abs(pop.minr + pop.maxr - 2 * depth))
                   / len;
    case PEAK: // 100% in the middle, small at the edges, 0% outside
        len += 2; // we want it to zero outside the range, not at the edge
        return rar * (len - abs(pop.minr + pop.maxr - 2 * depth))
                   / len;
    case RISE: // linear ramp: near zero at minr, full weight at maxr
        return rar * (depth - pop.minr + 1) / (len + 1);
    case FALL: // linear ramp: full weight at minr, near zero at maxr
        return rar * (pop.maxr - depth + 1) / (len + 1);
    }
    die("bad distrib");
}