// Cost-effectiveness of messaging campaigns directed at elected representatives.
// I created the core model, ran it through Squiggle AI to generate the boilerplate, and then wrote all the docstrings.
// ==== Inputs ====
@name("Proportion of Votes Within 1% (%)")
@doc(
  "Proportion of vote outcomes that fall within one percentage point of the pass threshold. Based on historical voting records. The number has varied over time: it was 2.2% from 2000 to 2010, and 5.1% from 2015 to 2025."
)
proportion_votes_within_1pct = 2.2% to 5.1% // assumed range spanning the two historical periods above
inputs = { proportion_votes_within_1pct: proportion_votes_within_1pct }
/* Nov 2025 */
boost = 1 + (0.1 to 1)
// Under the simple version, all orgs get the same boost.
// To test the max difference this could make, compare (1) the long-term FB boost
// and (2) the Gen Capacity boost, which should be the lowest.
voll = lognormal({p5: 10.6, p95: 18.7}) // $/kWh
// Via ERCOT/Brattle; their 95% CI fitted to a 90% CI due to Squiggle idiosyncrasies
threshold = 1*10^11 // $
lost_load_kWh = threshold / voll
lost_load_TWh = lost_load_kWh / 10^9
total_us_consumption_TWh = 4070
duration_multiplier = lognormal({p25:2.3, p75:3.2})
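// Hedged follow-on (my addition, an assumption about where this fragment was headed):
// scale the implied lost load by the duration multiplier and compare it against
// annual US consumption.
lost_load_share_of_us = lost_load_TWh * duration_multiplier / total_us_consumption_TWh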
maxFoodPerPerson = 1
defaultWesternMan = 0.25 to maxFoodPerPerson
defaultIndianMan = 0.9 to maxFoodPerPerson
womanFactor = triangular(0.75, 0.85, 0.95)
/*
Generated by Squiggle AI. Workflow ID: f0421d48-236b-4158-b2d9-6c830c768e48
*/
// Cost-Benefit Analysis for Bubble Tea Store in Berkeley over 5 years
import "hub:ozziegooen/sTest" as sTest
// == Time Parameters ==
@name("Analysis Period (months)")// 2026 Fundraising Budget // 1. Foresight Team forecasters = 4 to 6 forecaster_salary = 25k to 60k foresight_team_total = forecaster_salary * forecasters // 2. OSINT and Surveillance Technology software_engineer_osint = 90k to 250k
// Calendar-year 2025 data cash costs for OpenAI, Anthropic, Google DeepMind.
// Buckets: licenses, dataVendors (vendors producing/scoring data + RL gyms), internalGeneration (in-house human data).
// For OpenAI: estimate vendors first from sector shares, then internal = anchor - licenses - vendors.
// ===== Anchors & shared sector priors =====
@name("OpenAI 2025 total data costs ($)")
@doc("The Information: 'OpenAI plans to pay around $1B this year in data-related costs' (includes human experts + RL gyms). Tight core, with a tail.")
openaiDataTotal2025 = mx([950M to 1.05B, 800M to 1.2B], [0.7, 0.3])
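// Hedged sketch (my addition) of the bucket arithmetic described above
// ("estimate vendors first from sector shares, then internal = anchor - licenses - vendors");
// the licenses range and vendor share are hypothetical placeholders.
openaiLicenses2025 = 50M to 200M // hypothetical
openaiVendorShare = 0.3 to 0.6 // hypothetical sector-share prior
openaiDataVendors2025 = openaiDataTotal2025 * openaiVendorShare
openaiInternalGeneration2025 = openaiDataTotal2025 - openaiLicenses2025 - openaiDataVendors2025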
@name("Anthropic RL gyms next-12mo ($)")// PRIORS FOR PROBABILITIES // Uniform on p Prior uniform_prob_prior = beta(1,1) // Jeffreys Priors for probabilities jeffreys_prob_prior = truncate(beta(0.5, 0.5),0.00001,0.9999)
// Innovation data with correlated lognormal distributions
techData = [
{name: "SHR Geothermal", weight: 117, min: 0.43, max: 8.82},
{name: "SMR", weight: 966, min: 0.29, max: 7.64},
{name: "EGS", weight: 170, min: 0.22, max: 4.41},
{name: "Plant-Based Protein", weight: 153, min: 0.18, max: 4.04},
{name: "Cultivated Protein", weight: 117, min: 0.00001, max: 0.07}, // Avoid zero for lognormal
{name: "Nat Gas + CCS", weight: 313, min: 0.21, max: 5.21},
{name: "Solar", weight: 497, min: 0.15, max: 4.19},
{name: "Green Cement", weight: 136, min: 0.13, max: 2.42},crises__per_decade = 1 to 7 chance_sentinel_identifies_a_crisis_a_week_to_two_months_beforehand = beta(6,10) // Large scale catastrophe chance_crisis_is_catastrophic_but_not_existential = beta(3, 100) badness_of_catastrophic_risk_vs_existential = 0.1 // Existential catastrophe chance_crisis_is_existential = beta(1, 100)
questions = 10
forecaster_time_for_initial_forecast = 4 to 20 // hours per question
forecaster_hourly_rate = 100 to 200 // $/hour
initial_forecast_cost = questions * forecaster_time_for_initial_forecast * forecaster_hourly_rate
forecast_update = questions * (0.3 to 1) // hours of updating per question per week
update_cost = forecast_update * forecaster_hourly_rate
annual_cost_to_deliver = initial_forecast_cost + update_cost * 52 // 52 weekly updates
// How many AI digital workers?
// =====================================================
// Key question: on tasks that AI can currently solve, how many digital workers
// could OpenAI deploy given their current inference compute stocks?
// Method:
// 1. We start with estimates we've made of OpenAI's compute stocks (currently
//    around 1M H100-equivalents, split roughly equally between Hoppers and GB200s).
// 2. From this, we account for utilization + only some fraction of compute going
//    to inference, and get a daily amount of inference FLOP.
// 3. We then make assumptions about GPT-5 active params, and based on this
//    estimate (a) the number of GPT-5 inference tokens OpenAI could generate in a
//    day. We ensemble this with another estimate of (a), based on OpenAI's
//    announcement that they process 3 billion messages a day.
// 4. We then estimate (b) the number of "tokens" spent by a human employee in a
//    day, based on estimates of human thinking speed, as well as some token usage
//    data from METR.
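// Hedged sketch (my addition) of steps 1-4 above; every value marked
// "assumption" below is mine, not from the original estimates.
h100e_stock = 1M // step 1: ~1M H100-equivalents, from the text above
flop_per_h100e_per_s = 1e15 // assumption: effective dense FLOP/s per H100-equivalent
utilization = 0.2 to 0.4 // assumption
inference_fraction = 0.4 to 0.7 // assumption: share of compute spent on inference
daily_inference_flop = h100e_stock * flop_per_h100e_per_s * utilization * inference_fraction * 86400 // step 2
gpt5_active_params = 100B to 600B // assumption (step 3)
daily_tokens = daily_inference_flop / (2 * gpt5_active_params) // ~2 FLOP per active param per token
human_tokens_per_day = 50k to 500k // assumption (step 4)
digital_workers = daily_tokens / human_tokens_per_day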
/*
Generated by Squiggle AI. Workflow ID: 168d0158-1934-49d1-9972-32851ae39b32
*/
import "hub:ozziegooen/sTest" as sTest

// Democracy Preservation vs. AI Safety Funding Analysis
// This model examines the relative cost-effectiveness of funding
// democratic election interventions vs. AI safety interventions.

// == Core Parameters ==
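// Hedged sketch (my addition): illustrative core parameters matching the header
// above; both cost-effectiveness values are hypothetical placeholders.
democracy_cost_per_unit_good = 1M to 100M // hypothetical, $ per unit of good
ai_safety_cost_per_unit_good = 0.5M to 50M // hypothetical, $ per unit of good
relative_cost_effectiveness = ai_safety_cost_per_unit_good / democracy_cost_per_unit_good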
/*
Cost to morally offset the harm of an LLM subscription by donating to AI safety
organizations.
Variables and their values were written by me; scaffolding and docs were initially written by AI and then heavily edited by me.
Note: Any reference to "AI companies" refers specifically to frontier AI companies that are working toward AGI/ASI.
*/
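// Hedged sketch (my addition) of the offset logic described above; all values
// are hypothetical placeholders.
annual_subscription_usd = 240 // hypothetical: $20/month
harm_per_subscription_dollar = 0.1 to 2 // hypothetical: harm units per $ to a frontier AI company
good_per_safety_dollar = 0.5 to 5 // hypothetical: good units per $ donated to AI safety
offset_cost = annual_subscription_usd * harm_per_subscription_dollar / good_per_safety_dollar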
@name("Revenue to Valuation Ratio")/*
Generated by Squiggle AI. Workflow ID: 583117f3-e55f-4334-86f4-4d094f193e4e
*/
import "hub:ozziegooen/sTest" as sTest
// Youth Programs Cost-Effectiveness Model
// Comparing Leaf (large, low-cost with select Fellows) vs NT (selective with scholarships)
@name("Model Inputs")
inputs = {p =.3% to 3% // Percentage pop = 2M to 16M // Species population a = 1/(0.2 to 0.3125) // index -- typically 0.25 is used, ie. f^0.25 of species live in fraction 0.5 of the habitat. total_area = 10M to 15M // global rainforest area in sqkm fraction = (1-(1-p)^a)/(p*pop) // fraction of forest that must be destroyed to extinct one species area_per_extinction = total_area * fraction // in sqkm
/*
Relevant variables here:
- p(misaligned)
- p(control works)
- timeline to AGI
*/
agi_arrival_year = 28 to 39 // presumably 2028-2039
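// Hedged sketch (my addition): placeholder bindings for the other variables the
// note above lists; both priors are hypothetical.
p_misaligned = beta(2, 8) // hypothetical
p_control_works = beta(5, 5) // hypothetical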