Skip to content

Demand Generators

deepbullwhip.demand.base.DemandGenerator

Bases: ABC

Abstract base class for demand generators.

Source code in deepbullwhip/demand/base.py
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
class DemandGenerator(ABC):
    """Interface that every demand generator must implement.

    Concrete subclasses produce synthetic demand trajectories for
    supply-chain simulation experiments.
    """

    @abstractmethod
    def generate(self, T: int, seed: int | None = None) -> TimeSeries:
        """Produce a demand trajectory covering T periods.

        Parameters
        ----------
        T : int
            Length of the series to generate.
        seed : int or None
            Seed controlling the pseudo-random draws; ``None`` yields
            nondeterministic output.

        Returns
        -------
        TimeSeries
            1-D array of non-negative demand values, shape (T,).
        """
        ...

generate(T, seed=None) abstractmethod

Generate a demand time series of length T.

Parameters:

Name Type Description Default
T int

Number of periods.

required
seed int or None

Random seed for reproducibility.

None

Returns:

Type Description
TimeSeries

1-D array of non-negative demand values, shape (T,).

Source code in deepbullwhip/demand/base.py
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
@abstractmethod
def generate(self, T: int, seed: int | None = None) -> TimeSeries:
    """Produce a demand trajectory covering T periods.

    Parameters
    ----------
    T : int
        Length of the series to generate.
    seed : int or None
        Seed controlling the pseudo-random draws; ``None`` yields
        nondeterministic output.

    Returns
    -------
    TimeSeries
        1-D array of non-negative demand values, shape (T,).
    """
    ...

deepbullwhip.demand.semiconductor.SemiconductorDemandGenerator

Bases: DemandGenerator

AR(1) + seasonal + structural-shock demand, calibrated to WSTS data.

Parameters:

Name Type Description Default
mu float

Mean monthly demand ($B/month), scaled internally to weekly.

50.2
phi float

AR(1) autocorrelation coefficient.

0.72
sigma_eps float

Residual coefficient of variation (sigma_eps / mu).

0.08
seasonal_amp float

Seasonal amplitude as fraction of weekly mean.

0.06
shock_period int

Period index when the structural shock begins.

104
shock_magnitude float

Shock size as fraction of weekly mean.

0.1
Source code in deepbullwhip/demand/semiconductor.py
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
@register("demand", "semiconductor_ar1")
class SemiconductorDemandGenerator(DemandGenerator):
    """AR(1) + seasonal + structural-shock demand, calibrated to WSTS data.

    Parameters
    ----------
    mu : float
        Mean monthly demand ($B/month), scaled internally to weekly.
    phi : float
        AR(1) autocorrelation coefficient.
    sigma_eps : float
        Residual coefficient of variation (sigma_eps / mu).
    seasonal_amp : float
        Seasonal amplitude as fraction of weekly mean.
    shock_period : int
        Period index when the structural shock begins.
    shock_magnitude : float
        Shock size as fraction of weekly mean.
    """

    # Average weeks per month, used to convert the monthly calibration
    # mean into a weekly mean.
    _WEEKS_PER_MONTH = 4.33
    # Demand is floored here so paths can never become non-positive.
    _DEMAND_FLOOR = 0.1

    def __init__(
        self,
        mu: float = 50.2,
        phi: float = 0.72,
        sigma_eps: float = 0.08,
        seasonal_amp: float = 0.06,
        shock_period: int = 104,
        shock_magnitude: float = 0.10,
    ) -> None:
        self.mu = mu
        self.phi = phi
        self.sigma_eps = sigma_eps
        self.seasonal_amp = seasonal_amp
        self.shock_period = shock_period
        self.shock_magnitude = shock_magnitude

    def generate(self, T: int = 156, seed: int | None = None) -> TimeSeries:
        """Generate a single demand path of length T.

        Parameters
        ----------
        T : int
            Number of periods.
        seed : int or None
            Random seed for reproducibility.

        Returns
        -------
        TimeSeries
            1-D array of demand values, shape (T,), floored at 0.1.
        """
        # A single path is exactly the n_paths == 1 case of the batched
        # generator: RandomState draws the same T-1 innovations in the same
        # order for sizes (T-1,) and (1, T-1), so delegating reproduces the
        # previous standalone implementation bit-for-bit while removing the
        # duplicated AR(1)/seasonal/shock logic.
        return self.generate_batch(T=T, n_paths=1, seed=seed)[0]

    def generate_batch(
        self, T: int = 156, n_paths: int = 100, seed: int | None = None
    ) -> np.ndarray:
        """Generate N demand paths in parallel. Returns shape (n_paths, T).

        The AR(1) time dependency requires a sequential loop over T, but
        all N paths are updated simultaneously via vectorized operations.
        This is the primary GPU/vectorization opportunity: O(T) steps
        each processing N paths in parallel.

        Parameters
        ----------
        T : int
            Number of periods per path.
        n_paths : int
            Number of independent demand paths to generate.
        seed : int or None
            Random seed for reproducibility.

        Returns
        -------
        np.ndarray, shape (n_paths, T)
        """
        # NOTE: np.random.RandomState is legacy API, kept deliberately —
        # switching to default_rng would change the random stream and break
        # reproducibility of results generated under existing seeds.
        rng = np.random.RandomState(seed)
        mu_weekly = self.mu / self._WEEKS_PER_MONTH
        sigma_eps_abs = self.sigma_eps * mu_weekly

        # Pre-compute all random innovations at once: (n_paths, T-1)
        eps = rng.normal(0, sigma_eps_abs, (n_paths, T - 1))

        # Deterministic components, shape (T-1,): 52-week sinusoidal
        # seasonality plus a permanent level shift from shock_period onward.
        t_arr = np.arange(1, T)
        seasonal = self.seasonal_amp * mu_weekly * np.sin(2 * np.pi * t_arr / 52)
        shock = np.where(t_arr >= self.shock_period,
                         self.shock_magnitude * mu_weekly, 0.0)

        # Allocate output: (n_paths, T); every path starts at the weekly mean.
        D = np.zeros((n_paths, T))
        D[:, 0] = mu_weekly

        # Sequential over T (AR(1) dependency), vectorized over N paths.
        for i in range(T - 1):
            ar_term = mu_weekly + self.phi * (D[:, i] - mu_weekly)
            D[:, i + 1] = np.maximum(
                self._DEMAND_FLOOR, ar_term + seasonal[i] + shock[i] + eps[:, i]
            )

        return D

generate_batch(T=156, n_paths=100, seed=None)

Generate N demand paths in parallel. Returns shape (n_paths, T).

The AR(1) time dependency requires a sequential loop over T, but all N paths are updated simultaneously via vectorized operations. This is the primary GPU/vectorization opportunity: O(T) steps each processing N paths in parallel.

Parameters:

Name Type Description Default
T int

Number of periods per path.

156
n_paths int

Number of independent demand paths to generate.

100
seed int or None

Random seed for reproducibility.

None

Returns:

Type Description
np.ndarray, shape (n_paths, T)
Source code in deepbullwhip/demand/semiconductor.py
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def generate_batch(
    self, T: int = 156, n_paths: int = 100, seed: int | None = None
) -> np.ndarray:
    """Generate N demand paths in parallel. Returns shape (n_paths, T).

    The AR(1) time dependency requires a sequential loop over T, but
    all N paths are updated simultaneously via vectorized operations.
    This is the primary GPU/vectorization opportunity: O(T) steps
    each processing N paths in parallel.

    Parameters
    ----------
    T : int
        Number of periods per path.
    n_paths : int
        Number of independent demand paths to generate.
    seed : int or None
        Random seed for reproducibility.

    Returns
    -------
    np.ndarray, shape (n_paths, T)
    """
    rng = np.random.RandomState(seed)
    mu_weekly = self.mu / 4.33
    sigma_eps_abs = self.sigma_eps * mu_weekly

    # Pre-compute all random innovations at once: (n_paths, T-1)
    eps = rng.normal(0, sigma_eps_abs, (n_paths, T - 1))

    # Pre-compute deterministic components: (T-1,)
    t_arr = np.arange(1, T)
    seasonal = self.seasonal_amp * mu_weekly * np.sin(2 * np.pi * t_arr / 52)
    shock = np.where(t_arr >= self.shock_period,
                     self.shock_magnitude * mu_weekly, 0.0)

    # Allocate output: (n_paths, T)
    D = np.zeros((n_paths, T))
    D[:, 0] = mu_weekly

    # Sequential over T (AR(1) dependency), vectorized over N
    for i in range(T - 1):
        ar_term = mu_weekly + self.phi * (D[:, i] - mu_weekly)
        D[:, i + 1] = np.maximum(
            0.1, ar_term + seasonal[i] + shock[i] + eps[:, i]
        )

    return D