annual_benefit = 100
risk_window = 30
speedup = 1
probability_increase = 1%
counterfactual_probability = 1% to 10%
counterfactual_years_in_future = 10 to 100
@name("Total addressable burden of respiratory disease") addressable_burden = 234.8B to 842.3B // See https://squigglehub.org/models/blueprint-biosecurity/burden-of-resp-disease @name("Total indoor public space in the US (1000 sqft)") space = 96423000 // https://www.eia.gov/consumption/commercial/data/2018/#b1-b2 @name("Space covered") space_covered = 10% // We fix the percentage of public indoor space covered at 10% @name("Transmission in the space")
// The value of work which pays off in short timelines scenarios only
p_tai_before_2030 = 1% to 24% // 1% is an assumption, 24% is the Metaculus forecast here: https://metaculus.com/questions/19356/transformative-ai-date/
annual_biorisk = (0.1% to 10%)/30*(1 to 10) // assuming that annual biorisk is 1-10x as high in short timelines worlds, using ASB's total biorisk/30 year risk window
risk_reduction = 0.5% to 5% // assumption
years_of_risk_reduction = 0.5 to 5 // assuming for a successful intervention we get at least 0.5 years, and max 5 (2030-2025)
bps_at_stake = 10k*p_tai_before_2030*annual_biorisk*risk_reduction*years_of_risk_reduction
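// A hedged sketch, not part of the original model: comparing the basis points at stake
// against the rough 0.5-50 bps per $bn bar used in the other snippets in this section,
// under a purely illustrative grant size.
hypothetical_spend_bn = 0.03 // i.e. an illustrative $30M of spending, an assumption
implied_bps_per_bn = bps_at_stake / hypothetical_spend_bn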
fixture = 200 to 1000
installation = 50 to 500
lamp_energy_usage = 11 // watts
energy_price = 0.1 to 0.3 // $/kWh
annual_operating_hours = 2500 to 365*24
annual_energy_costs_per_fixture = lamp_energy_usage*energy_price/1000*annual_operating_hours // /1000 converts W to kW
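// A hedged sketch, assuming a hypothetical fixture lifetime to illustrate total cost of
// ownership per fixture; the lifetime range is an assumption, not from the original model.
assumed_lifetime_years = 5 to 10 // assumption
lifetime_cost_per_fixture = fixture + installation + annual_energy_costs_per_fixture * assumed_lifetime_years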
bps = 10k
bps_per_bn_bar = 0.5 to 50
risk_window = 30 // probably makes sense for this to be a distribution, e.g. TAI timelines or similar
bio_x_risk = (0.1% to 10%)/risk_window // annual biorisk during risk window
risk_reduction = lognormal({p25: 2%, p75: 5%})
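// A hedged sketch of how these inputs might combine, following the multiplicative pattern
// used in the short-timelines snippet above (bps * annual risk * risk reduction); the
// implied-acceptable-spend line is an illustration, not part of the original model.
annual_bps_at_stake = bps * bio_x_risk * risk_reduction
implied_acceptable_spend_bn = annual_bps_at_stake / bps_per_bn_bar // $bn per year that would still clear the bar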
bps = 10k
bps_per_bn_bar = 0.5 to 50
risk_window = 30 // probably makes sense for this to be a distribution, e.g. TAI timelines or similar
bio_x_risk = 0.001% to 0.1% // annual biorisk during risk window, Luca (previously ASB: (0.1% to 10%)/risk_window)
risk_reduction = lognormal({p25: 2%, p75: 5%})
risk_window = 30
annual_bio_x_risk = (0.1% to 10%)/risk_window
risk_reduction = 1%
years_speedup = 1
prob_maturity_in_window = 20%
bps = 10k
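// A hedged sketch of one way these parameters could combine, by analogy with the
// short-timelines model above; the multiplicative form is an assumption.
bps_at_stake = bps * annual_bio_x_risk * risk_reduction * years_speedup * prob_maturity_in_window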
// This is an uncertainty analysis of the annual burden of respiratory disease from long range aerosol transmission in the US. See this documentation for why we have chosen these parameter values: https://docs.google.com/document/d/1Tnyb5WHn727QrFKknFiMyqSm68fYG8tTlgfHedjzT98/edit

// Background parameters
@name("QALY value")
qaly = 750k
@name("QALY adjustment")
qaly_adjustment = normal(0.7,0.2)
@name("Long range aerosol transmission rates")
aeorol_rates = {
// Baseline
daly = 100k
pop = 8B
economic_multiplier = 2 // I think GWH uses 2 as a rule of thumb for non-existential pandemics
pandemic_duration = 2 to 4 // https://zenodo.org/records/4626111: average duration in the dataset is 4
qaly = 750k
qaly_adjustment = 71%
aeorol_rates = {
  covid: 20% to 80%,
  flu: 20% to 80%,
  lris: 12% to 50%,
  non_flu_lris: 4% to 16%
}
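// A minimal illustrative sketch: the adjusted dollar value per QALY implied by the two
// parameters above. Applying the aerosol transmission rates to disease-specific burdens
// would need the burden figures from the linked burden-of-resp-disease model, which are
// not defined in this snippet.
adjusted_qaly_value = qaly * qaly_adjustment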
// Background params
bps = 10k
bps_per_bn_bar = 0.5 to 50 // OP rough bar

// How much risk is at stake?
bio_x_risk = 0.1% to 10% // biorisk during 30 year risk window, from ASB
aerosolisation_rate = 1% to 50% // placeholder - addressable transmission vectors
aware_of_transmission = 20% to 60%
infections_in_workplace = 30% to 90% // placeholder
non_redundant_infections = 30% to 70% // placeholder
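// A hedged sketch of how these filters might combine into the share of risk a far-UVC
// deployment could address; treating them as independent multiplicative discounts is an
// assumption, not something stated in the original snippet.
addressable_share = aerosolisation_rate * aware_of_transmission * infections_in_workplace * non_redundant_infections
addressable_bps = bps * bio_x_risk * addressable_share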
// Far-UVC

// 1. Set-up

// 1.1 Set-up: units
m = 10^6
b = 10^9
bps = 10000

// 1.2 Set-up: biorisk
bps = 10k
bps_per_bn_bar = 0.5 to 50
risk_window = 30 // probably makes sense for this to be a distribution, e.g. TAI timelines or similar
bio_x_risk = (0.1% to 10%)/risk_window // annual biorisk during risk window
risk_reduction = 1% // placeholder
bps = 10k
bps_per_bn_bar = 0.5 to 50
risk_window = 30 // probably makes sense for this to be a distribution, e.g. TAI timelines or similar
bio_x_risk = (0.1% to 10%)/risk_window // annual biorisk during risk window
risk_reduction = 13%
/* 3.2 Bottom up theories of change */
grantmaker_budget = 5M to 100M
uvc_speed_up = 1 to 10

// Blueprint cost module
// Costs: direct costs
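// A hedged sketch, not in the original snippet: cost per year of far-UVC speed-up, read as
// the grantmaker budget spread over the years of acceleration it buys (an assumption about
// how the two parameters relate).
cost_per_year_of_speedup = grantmaker_budget / uvc_speed_up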
// Case rate info
annual_deaths = 60k to 150k
// https://covid.cdc.gov/covid-data-tracker/#trends_totaldeaths_select_00 provisional deaths in the last year: 65355
// https://ourworldindata.org/covid-cases confirmed deaths over the last year: 1.18M - 1.12M = 60000
// https://www.metaculus.com/questions/7546/deaths-from-covid-19-per-year-2022-2025-in-us/ 130k forecasted per year 2022 to 2025
k = 0.01 to 15 // roughly the range here https://docs.google.com/spreadsheets/d/1lVr0aWTFvlcjG2Rp7GPKOan_ET2hwSBoy05Ap8KsUko/edit#gid=0
fluence = 1 // max that's plausible within current safety guidelines according to Richard
base_ACH = 1 to 10 // 1DS IAQ report: ASHRAE standards "approximately 1-2 ACH in residences and offices (though half of studied buildings fall below ASHRAE standards)." "high levels of eACH up to CDC hospital standards (8-12 eACH)". Assuming that base ACH might be much higher in a pandemic
base_decay = 0.1 to 0.3 // numbers from Richard, I don't understand them
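// A hedged sketch of a simple well-mixed-room reading of these parameters. Treating k as
// the equivalent air changes per hour delivered per unit of fluence (so uvc_each = k at
// fluence = 1), and base_decay as an additional first-order loss rate, are assumptions,
// not interpretations given in the original snippet.
uvc_each = k * fluence
relative_reduction = uvc_each / (base_ACH + base_decay + uvc_each) // steady-state drop in airborne pathogen concentration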
// current method
flu_deaths = 700k // https://ourworldindata.org/influenza
flu_yll = 10 to 30 // https://vizhub.healthdata.org/gbd-results/ lower resp infections: ~40 YLL per death ((84.2M/2.3M) to (112.4M/2.7M)). That seems too high, so arbitrarily lowering to 10 to 30
covid_deaths = 7.05M - 6.92M // https://ourworldindata.org/grapher/cumulative-covid-deaths-region?time=2023-04-19..latest
covid_yll = 3 to 15 // https://bmcpublichealth.biomedcentral.com/articles/10.1186/s12889-021-12377-1/figures/1
expected_yll = flu_deaths*flu_yll+covid_deaths*covid_yll
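// A hedged sketch, not in the original snippet: monetising the expected years of life lost
// using the 750k QALY value from the other models in this section, and treating one YLL as
// roughly one QALY (an assumption).
qaly_value = 750k
expected_annual_burden = expected_yll * qaly_value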
// Risk reduction
risk_reduction = 1% // placeholder

// Baseline
pandemic_data = {
  pop: 8B,