From ee1cdf3efe05092b1c8ceec0326be0c5afc9c292 Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Sat, 9 Nov 2019 22:39:06 -0600 Subject: [PATCH 01/66] Add ability to calculate glucose noise (#1298) * Add ability to calculate glucose noise * don not use ** operator * log messages for iob error * remove test debug * log message for calculating glucose noise * fix preference check for calc_glucose_noise * add calculate-glucose-noise to package.json * default calc_glucose_noise preference to false * fix error in comments * use max noise value of CGM or oref0 computed noise * update log messages for glucose noise; add noise test --- bin/oref0-calculate-glucose-noise.js | 52 ++++++ bin/oref0-pump-loop.sh | 11 ++ lib/calc-glucose-stats.js | 30 ++++ lib/glucose-stats.js | 244 +++++++++++++++++++++++++++ lib/profile/index.js | 1 + package.json | 1 + tests/glucose-noise.js | 185 ++++++++++++++++++++ 7 files changed, 524 insertions(+) create mode 100755 bin/oref0-calculate-glucose-noise.js create mode 100644 lib/calc-glucose-stats.js create mode 100644 lib/glucose-stats.js create mode 100644 tests/glucose-noise.js diff --git a/bin/oref0-calculate-glucose-noise.js b/bin/oref0-calculate-glucose-noise.js new file mode 100755 index 000000000..796f956b6 --- /dev/null +++ b/bin/oref0-calculate-glucose-noise.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node + +/* + Glucose noise calculation + + Released under MIT license. See the accompanying LICENSE.txt file for + full terms and conditions + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
+ +*/ + +var generate = require('../lib/calc-glucose-stats').updateGlucoseStats; + +function usage ( ) { + console.log('usage: ', process.argv.slice(0, 2), ''); +} + +if (!module.parent) { + var argv = require('yargs') + .usage("$0 ") + .strict(true) + .help('help'); + + var params = argv.argv; + var inputs = params._ + + if (inputs.length !== 1) { + argv.showHelp() + console.error('Incorrect number of arguments'); + process.exit(1); + } + + var glucose_input = inputs[0]; + + var cwd = process.cwd(); + var glucose_hist = require(cwd + '/' + glucose_input); + + inputs = { + glucose_hist: glucose_hist + }; + + glucose_hist = generate(inputs); + console.log(JSON.stringify(glucose_hist)); +} + diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index cb1424901..9abe96bc8 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -317,6 +317,9 @@ function smb_suggest { function determine_basal { cat monitor/meal.json + + update_glucose_noise + if ( grep -q 12 settings/model.json ); then oref0-determine-basal monitor/iob.json monitor/temp_basal.json monitor/glucose.json settings/profile.json --auto-sens settings/autosens.json --meal monitor/meal.json --reservoir monitor/reservoir.json > enact/smb-suggested.json else @@ -911,6 +914,14 @@ function compare_with_fullhistory() { fi } +function update_glucose_noise() { + if check_pref_bool .calc_glucose_noise false; then + echo "Recalculating glucose noise measurement" + oref0-calculate-glucose-noise monitor/glucose.json > monitor/glucose.json.new + mv monitor/glucose.json.new monitor/glucose.json + fi +} + function valid_pump_settings() { SUCCESS=1 diff --git a/lib/calc-glucose-stats.js b/lib/calc-glucose-stats.js new file mode 100644 index 000000000..0b1b3694e --- /dev/null +++ b/lib/calc-glucose-stats.js @@ -0,0 +1,30 @@ +const moment = require('moment'); +const _ = require('lodash'); +const stats = require('./glucose-stats'); + +module.exports = {}; +const calcStatsExports = module.exports; + +calcStatsExports.updateGlucoseStats = (options) => { + var hist = _.map(_.sortBy(options.glucose_hist, 'dateString'), function readDate(value) { + value.readDateMills = moment(value.dateString).valueOf(); + return value; + }); + + if (hist && hist.length > 0) { + var noise_val = stats.calcSensorNoise(null, hist, null, null); + + var ns_noise_val = stats.calcNSNoise(noise_val, hist); + + if ('noise' in options.glucose_hist[0]) { + console.error("Glucose noise CGM reported level: ", options.glucose_hist[0].noise); + ns_noise_val = Math.max(ns_noise_val, options.glucose_hist[0].noise); + } + + console.error("Glucose noise calculated: ", noise_val, " setting noise level to ", ns_noise_val); + + options.glucose_hist[0].noise = ns_noise_val; + } + + return options.glucose_hist; +}; diff --git a/lib/glucose-stats.js b/lib/glucose-stats.js new file mode 100644 index 000000000..c89497f4a --- /dev/null +++ b/lib/glucose-stats.js @@ -0,0 +1,244 @@ + + +const moment = require('moment'); + +const log = console.error; + +/* eslint-disable-next-line no-unused-vars */ +const error = console.error; +const debug = console.error; + +module.exports = {}; +const calcStatsExports = module.exports; + +// Calculate the sum of the distance of all points (sod) +// Calculate the overall distance between the first and the last point (overallDistance) +// Calculate the noise as the following formula: 1 - sod / overallDistance +// Noise will get closer to zero as the sum of the individual lines are mostly +// in a straight or straight moving curve +// Noise will get 
closer to one as the sum of the distance of the individual lines get large +// Also add multiplier to get more weight to the latest BG values +// Also added weight for points where the delta shifts from pos to neg or neg to pos (peaks/valleys) +// the more peaks and valleys, the more noise is amplified +// Input: +// [ +// { +// real glucose -- glucose value in mg/dL +// real readDate -- milliseconds since Epoch +// },... +// ] +const calcNoise = (sgvArr) => { + let noise = 0; + + const n = sgvArr.length; + + const firstSGV = sgvArr[0].glucose * 1000.0; + const firstTime = sgvArr[0].readDate / 1000.0 * 30.0; + + const lastSGV = sgvArr[n - 1].glucose * 1000.0; + const lastTime = sgvArr[n - 1].readDate / 1000.0 * 30.0; + + const xarr = []; + + for (let i = 0; i < n; i += 1) { + xarr.push(sgvArr[i].readDate / 1000.0 * 30.0 - firstTime); + } + + // sod = sum of distances + let sod = 0; + const lastDelta = 0; + + for (let i = 1; i < n; i += 1) { + // y2y1Delta adds a multiplier that gives + // higher priority to the latest BG's + let y2y1Delta = (sgvArr[i].glucose - sgvArr[i - 1].glucose) * 1000.0 * (1 + i / (n * 3)); + + const x2x1Delta = xarr[i] - xarr[i - 1]; + + if ((lastDelta > 0) && (y2y1Delta < 0)) { + // switched from positive delta to negative, increase noise impact + y2y1Delta *= 1.1; + } else if ((lastDelta < 0) && (y2y1Delta > 0)) { + // switched from negative delta to positive, increase noise impact + y2y1Delta *= 1.2; + } + + sod += Math.sqrt(Math.pow(x2x1Delta, 2) + Math.pow(y2y1Delta, 2)); + } + + const overallsod = Math.sqrt(Math.pow(lastSGV - firstSGV, 2) + Math.pow(lastTime - firstTime, 2)); + + if (sod === 0) { + // protect from divide by 0 + noise = 0; + } else { + noise = 1 - (overallsod / sod); + } + + return noise; +}; + +calcStatsExports.calcSensorNoise = (calcGlucose, glucoseHist, lastCal, sgv) => { + const MAXRECORDS = 8; + const MINRECORDS = 4; + const sgvArr = []; + + const numRecords = Math.max(glucoseHist.length - MAXRECORDS, 0); + + for (let i = numRecords; i < glucoseHist.length; i += 1) { + // Only use values that are > 30 to filter out invalid values. + if (lastCal && (glucoseHist[i].glucose > 30) && ('unfiltered' in glucoseHist[i]) && (glucoseHist[i].unfiltered > 100)) { + // use the unfiltered data with the most recent calculated calibration value + // this will provide a noise calculation that is independent of calibration jumps + sgvArr.push({ + glucose: calcGlucose(glucoseHist[i], lastCal), + readDate: glucoseHist[i].readDateMills, + }); + } else if (glucoseHist[i].glucose > 30) { + // if raw data isn't available, use the transmitter calibrated glucose + sgvArr.push({ + glucose: glucoseHist[i].glucose, + readDate: glucoseHist[i].readDateMills, + }); + } + } + + if (sgv) { + if (lastCal && 'unfiltered' in sgv && sgv.unfiltered > 100) { + sgvArr.push({ + glucose: calcGlucose(sgv, lastCal), + readDate: sgv.readDateMills, + }); + } else { + sgvArr.push({ + glucose: sgv.glucose, + readDate: sgv.readDateMills, + }); + } + } + if (sgvArr.length < MINRECORDS) { + return 0; + } + return calcNoise(sgvArr); +}; + +// Return 10 minute trend total +calcStatsExports.calcTrend = (calcGlucose, glucoseHist, lastCal, sgv) => { + let sgvHist = null; + + let trend = 0; + + if (glucoseHist.length > 0) { + let maxDate = null; + let timeSpan = 0; + let totalDelta = 0; + const currentTime = sgv ? 
moment(sgv.readDateMills) + : moment(glucoseHist[glucoseHist.length - 1].readDateMills); + + sgvHist = []; + + // delete any deltas > 16 minutes and any that don't have an unfiltered value (backfill records) + let minDate = currentTime.valueOf() - 16 * 60 * 1000; + for (let i = 0; i < glucoseHist.length; i += 1) { + if (lastCal && (glucoseHist[i].readDateMills >= minDate) && ('unfiltered' in glucoseHist[i]) && (glucoseHist[i].unfiltered > 100)) { + sgvHist.push({ + glucose: calcGlucose(glucoseHist[i], lastCal), + readDate: glucoseHist[i].readDateMills, + }); + } else if (glucoseHist[i].readDateMills >= minDate) { + sgvHist.push({ + glucose: glucoseHist[i].glucose, + readDate: glucoseHist[i].readDateMills, + }); + } + } + + if (sgv) { + if (lastCal && ('unfiltered' in sgv) && (sgv.unfiltered > 100)) { + sgvHist.push({ + glucose: calcGlucose(sgv, lastCal), + readDate: sgv.readDateMills, + }); + } else { + sgvHist.push({ + glucose: sgv.glucose, + readDate: sgv.readDateMills, + }); + } + } + + if (sgvHist.length > 1) { + minDate = sgvHist[0].readDate; + maxDate = sgvHist[sgvHist.length - 1].readDate; + + // Use the current calibration value to calculate the glucose from the + // unfiltered data. This allows the trend calculation to be independent + // of the calibration jumps + totalDelta = sgvHist[sgvHist.length - 1].glucose - sgvHist[0].glucose; + + timeSpan = (maxDate - minDate) / 1000.0 / 60.0; + + trend = 10 * totalDelta / timeSpan; + } + } else { + debug(`Not enough history for trend calculation: ${glucoseHist.length}`); + } + + return trend; +}; + +// Return sensor noise +calcStatsExports.calcNSNoise = (noise, glucoseHist) => { + let nsNoise = 0; // Unknown + const currSGV = glucoseHist[glucoseHist.length - 1]; + let deltaSGV = 0; + + if (glucoseHist.length > 1) { + const priorSGV = glucoseHist[glucoseHist.length - 2]; + + if ((currSGV.glucose > 30) && (priorSGV.glucose > 30)) { + deltaSGV = currSGV.glucose - priorSGV.glucose; + } + } + + if (!currSGV) { + nsNoise = 1; + } else if (currSGV.glucose > 400) { + log(`Glucose ${currSGV.glucose} > 400 - setting noise level Heavy`); + nsNoise = 4; + } else if (currSGV.glucose < 40) { + log(`Glucose ${currSGV.glucose} < 40 - setting noise level Light`); + nsNoise = 2; + } else if (Math.abs(deltaSGV) > 30) { + // This is OK even during a calibration jump because we don't want OpenAPS to be too + // agressive with the "false" trend implied by a large positive jump + log(`Glucose change ${deltaSGV} out of range [-30, 30] - setting noise level Heavy`); + nsNoise = 4; + } else if (noise < 0.35) { + nsNoise = 1; // Clean + } else if (noise < 0.5) { + nsNoise = 2; // Light + } else if (noise < 0.7) { + nsNoise = 3; // Medium + } else if (noise >= 0.7) { + nsNoise = 4; // Heavy + } + + return nsNoise; +}; + +calcStatsExports.NSNoiseString = (nsNoise) => { + switch (nsNoise) { + case 1: + return 'Clean'; + case 2: + return 'Light'; + case 3: + return 'Medium'; + case 4: + return 'Heavy'; + case 0: + default: + return 'Unknown'; + } +}; diff --git a/lib/profile/index.js b/lib/profile/index.js index e43ecb40c..00ba074ca 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -67,6 +67,7 @@ function defaults ( ) { , enableEnliteBgproxy: false // TODO: make maxRaw a preference here usable by oref0-raw in myopenaps-cgm-loop //, maxRaw: 200 // highest raw/noisy CGM value considered safe to use for looping + , calc_glucose_noise: false }; } diff --git a/package.json b/package.json index 1d1005acf..ce0f5080f 100644 --- a/package.json +++ b/package.json 
@@ -45,6 +45,7 @@ "oref0-bash-common-functions.sh": "./bin/oref0-bash-common-functions.sh", "oref0-bluetoothup": "./bin/oref0-bluetoothup.sh", "oref0-calculate-iob": "./bin/oref0-calculate-iob.js", + "oref0-calculate-glucose-noise": "./bin/oref0-calculate-glucose-noise.js", "oref0-copy-fresher": "./bin/oref0-copy-fresher", "oref0-crun": "./bin/oref0-conditional-run.sh", "oref0-cron-every-minute": "./bin/oref0-cron-every-minute.sh", diff --git a/tests/glucose-noise.js b/tests/glucose-noise.js new file mode 100644 index 000000000..f6f9828fc --- /dev/null +++ b/tests/glucose-noise.js @@ -0,0 +1,185 @@ +'use strict'; + +require('should'); + +var moment = require('moment'); +var stats = require('../lib/calc-glucose-stats'); + +describe('NOISE', function() { + it('should calculate Clean Sensor Noise', () => { + const glucoseHist = [{ + status: 0, + state: 7, + readDate: 1528890389945, + readDateMills: 1528890389945, + filtered: 161056, + unfiltered: 158400, + glucose: 155, + trend: -3.9982585362819747, + }, { + status: 0, + state: 7, + readDate: 1528890689766, + readDateMills: 1528890689766, + filtered: 159360, + unfiltered: 156544, + glucose: 153, + trend: -3.9992534726850986, + }, { + status: 0, + state: 7, + readDate: 1528890989467, + readDateMills: 1528890989467, + filtered: 157504, + unfiltered: 154432, + glucose: 150, + trend: -4.667973699302471, + }, { + status: 0, + state: 7, + readDate: 1528891289963, + readDateMills: 1528891289963, + filtered: 155488, + unfiltered: 151872, + glucose: 147, + trend: -5.3332266687999565, + }, { + status: 0, + state: 7, + readDate: 1528891589664, + readDateMills: 1528891589664, + filtered: 153312, + unfiltered: 149984, + glucose: 145, + trend: -5.333937846289246, + }, { + status: 0, + state: 7, + readDate: 1528891889576, + readDateMills: 1528891889576, + filtered: 151008, + unfiltered: 147264, + glucose: 141, + trend: -5.999273421330083, + }, { + status: 0, + state: 7, + readDate: 1528892189592, + readDateMills: 1528892189592, + filtered: 148544, + unfiltered: 144256, + glucose: 138, + trend: -6.002474353316756, + }]; + + const currSGV = { + status: 0, + state: 7, + readDate: 1528892489488, + readDateMills: 1528892489488, + filtered: 145920, + unfiltered: 141632, + glucose: 134, + trend: -7.334767687903413, + }; + + glucoseHist.push(currSGV); + + var options = { + glucose_hist: glucoseHist + }; + + const newHist = stats.updateGlucoseStats(options); + + newHist[0].noise.should.equal(1); + }); + + it('should calculate Medium Sensor Noise', () => { + const glucoseHist = [{ + status: 0, + state: 7, + readDate: 1528890389945, + readDateMills: 1528890389945, + filtered: 161056, + unfiltered: 158400, + glucose: 155, + trend: -3.9982585362819747, + }, { + status: 0, + state: 7, + readDate: 1528890689766, + readDateMills: 1528890689766, + filtered: 159360, + unfiltered: 156544, + glucose: 153, + trend: -3.9992534726850986, + }, { + status: 0, + state: 7, + readDate: 1528890989467, + readDateMills: 1528890989467, + filtered: 157504, + unfiltered: 154432, + glucose: 150, + trend: -4.667973699302471, + }, { + status: 0, + state: 7, + readDate: 1528891289963, + readDateMills: 1528891289963, + filtered: 155488, + unfiltered: 151872, + glucose: 147, + trend: -5.3332266687999565, + }, { + status: 0, + state: 7, + readDate: 1528891589664, + readDateMills: 1528891589664, + filtered: 153312, + unfiltered: 149984, + glucose: 145, + trend: -5.333937846289246, + }, { + status: 0, + state: 7, + readDate: 1528891889576, + readDateMills: 1528891889576, + filtered: 151008, + 
unfiltered: 147264, + glucose: 141, + trend: -5.999273421330083, + }, { + status: 0, + state: 7, + readDate: 1528892189592, + readDateMills: 1528892189592, + filtered: 148544, + unfiltered: 144256, + glucose: 158, + trend: -6.002474353316756, + }]; + + const currSGV = { + status: 0, + state: 7, + readDate: 1528892489488, + readDateMills: 1528892489488, + filtered: 145920, + unfiltered: 141632, + glucose: 134, + trend: -7.334767687903413, + }; + + glucoseHist.push(currSGV); + + var options = { + glucose_hist: glucoseHist + }; + + const newHist = stats.updateGlucoseStats(options); + + newHist[0].noise.should.equal(3); + }); +}); + From 6bc60eb98b0381ccb0beb734a36e21bf4b8b83ed Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Fri, 15 Nov 2019 19:09:57 +0200 Subject: [PATCH 02/66] Remove aCOBpredBGs (accelerated carbohydrate absorption) as it is not used anymore. (#1319) Signed-off-by: Tzachi Dar --- lib/determine-basal/determine-basal.js | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index a5923eaf9..002a07ea3 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -414,12 +414,10 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // generate predicted future BGs based on IOB, COB, and current absorption rate var COBpredBGs = []; - var aCOBpredBGs = []; var IOBpredBGs = []; var UAMpredBGs = []; var ZTpredBGs = []; COBpredBGs.push(bg); - aCOBpredBGs.push(bg); IOBpredBGs.push(bg); ZTpredBGs.push(bg); UAMpredBGs.push(bg); @@ -527,10 +525,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } else { cid = Math.min(remainingCATime*60/5/2,Math.max(0, meal_data.mealCOB * csf / ci )); } - var acid = Math.max(0, meal_data.mealCOB * csf / aci ); // duration (hours) = duration (5m) * 5 / 60 * 2 (to account for linear decay) console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (~2h peak):",round(remainingCIpeak,1),"mg/dL per 5m"); - //console.error("Accel. Carb Impact:",aci,"mg/dL per 5m; ACI Duration:",round(acid*5/60*2,1),"hours"); + //console.error("Accel. 
Carb Impact:",aci,"mg/dL per 5m; ACI Duration:",round(aci*5/60*2,1),"hours"); var minIOBPredBG = 999; var minCOBPredBG = 999; var minUAMPredBG = 999; @@ -569,7 +566,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // for COBpredBGs, predicted carb impact drops linearly from current carb impact down to zero // eventually accounting for all carbs (if they can be absorbed over DIA) var predCI = Math.max(0, Math.max(0,ci) * ( 1 - COBpredBGs.length/Math.max(cid*2,1) ) ); - var predACI = Math.max(0, Math.max(0,aci) * ( 1 - COBpredBGs.length/Math.max(acid*2,1) ) ); // if any carbs aren't absorbed after remainingCATime hours, assume they'll absorb in a /\ shaped // bilinear curve peaking at remainingCIpeak at remainingCATime/2 hours (remainingCATime/2*12 * 5m) // and ending at remainingCATime h (remainingCATime*12 * 5m intervals) @@ -580,7 +576,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ predCIs.push(round(predCI,0)); //process.stderr.write(round(predCI,1)+"+"+round(remainingCI,1)+" "); COBpredBG = COBpredBGs[COBpredBGs.length-1] + predBGI + Math.min(0,predDev) + predCI + remainingCI; - var aCOBpredBG = aCOBpredBGs[aCOBpredBGs.length-1] + predBGI + Math.min(0,predDev) + predACI; // for UAMpredBGs, predicted carb impact drops at slopeFromDeviations // calculate predicted CI from UAM based on slopeFromDeviations var predUCIslope = Math.max(0, uci + ( UAMpredBGs.length*slopeFromDeviations ) ); @@ -599,7 +594,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // truncate all BG predictions at 4 hours if ( IOBpredBGs.length < 48) { IOBpredBGs.push(IOBpredBG); } if ( COBpredBGs.length < 48) { COBpredBGs.push(COBpredBG); } - if ( aCOBpredBGs.length < 48) { aCOBpredBGs.push(aCOBpredBG); } if ( UAMpredBGs.length < 48) { UAMpredBGs.push(UAMpredBG); } if ( ZTpredBGs.length < 48) { ZTpredBGs.push(ZTpredBG); } // calculate minGuardBGs without a wait from COB, UAM, IOB predBGs @@ -654,15 +648,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } rT.predBGs.ZT = ZTpredBGs; lastZTpredBG=round(ZTpredBGs[ZTpredBGs.length-1]); - if (meal_data.mealCOB > 0) { - aCOBpredBGs.forEach(function(p, i, theArray) { - theArray[i] = round(Math.min(401,Math.max(39,p))); - }); - for (i=aCOBpredBGs.length-1; i > 12; i--) { - if (aCOBpredBGs[i-1] !== aCOBpredBGs[i]) { break; } - else { aCOBpredBGs.pop(); } - } - } if (meal_data.mealCOB > 0 && ( ci > 0 || remainingCIpeak > 0 )) { COBpredBGs.forEach(function(p, i, theArray) { theArray[i] = round(Math.min(401,Math.max(39,p))); From 1f8c40037abaa968b760240d70845a4cfafd9689 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Sun, 17 Nov 2019 00:48:43 +0200 Subject: [PATCH 03/66] Allow to upload profile even when the url ends with / (for example: https://site.herokuapp.com/) (#1318) --- bin/oref0-upload-profile.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bin/oref0-upload-profile.js b/bin/oref0-upload-profile.js index 06c892e79..07c4d768a 100755 --- a/bin/oref0-upload-profile.js +++ b/bin/oref0-upload-profile.js @@ -65,6 +65,10 @@ if (!module.parent) { process.exit(1); } + if(nsurl.endsWith('/')) { + nsurl = nsurl.substring(0, nsurl.length - 1); + } + if (apisecret.indexOf('token=') !== 0 && apisecret.length !== 40) { var shasum = crypto.createHash('sha1'); shasum.update(String(apisecret)); From 62875eb0027a4ec1257f323c5016a62f363143b2 Mon Sep 17 00:00:00 2001 From: old-square-eyes Date: Sat, 23 Nov 2019 09:56:09 +1300 Subject: [PATCH 
04/66] Make explicit that NS secret prompt will follow (#1320) --- bin/oref0-setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 261da95b8..cdf572158 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -446,7 +446,7 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo if [[ ! -z $NIGHTSCOUT_HOST ]]; then echo "Starting with oref 0.5.0 you can use token based authentication to Nightscout. This makes it possible to deny anonymous access to your Nightscout instance. It's more secure than using your API_SECRET, but must first be configured in Nightscout." - if prompt_yn "Do you want to use token based authentication?" N; then + if prompt_yn "Do you want to use token based authentication? (Enter 'N' to provide your Nightscout secret instead)" N; then prompt_and_validate REPLY "What Nightscout access token (i.e. subjectname-hashof16characters) do you want to use for this rig?" validate_nightscout_token API_SECRET="token=${REPLY}" echocolor "Ok, $API_SECRET it is." From f3b52071accf1d840c48c7b7479b81786aa34f96 Mon Sep 17 00:00:00 2001 From: Scott Date: Sun, 1 Dec 2019 11:07:21 -0800 Subject: [PATCH 05/66] xdrip-js also works with G6 --- bin/oref0-setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index cdf572158..8be43304c 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -317,7 +317,7 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo "G6-upload: will use and upload BGs from a plugged in G5/G6 touchscreen receiver to Nightscout" echo "MDT: will use and upload BGs from an Enlite sensor paired to your pump" echo "xdrip: will work with an xDrip receiver app on your Android phone" - echo "xdrip-js: will work directly with a Dexcom G5 transmitter and will upload to Nightscout" + echo "xdrip-js: will work directly with a Dexcom G5/G6 transmitter and will upload to Nightscout" echo "Note: no matter which option you choose, CGM data will also be downloaded from NS when available." echo prompt_and_validate CGM "What kind of CGM would you like to configure?:" validate_cgm From cb8e94990301277fb1016c778b4e9efa55a6edbc Mon Sep 17 00:00:00 2001 From: Scott Date: Sun, 1 Dec 2019 17:32:51 -0800 Subject: [PATCH 06/66] fix bug in #1258 breaking don't loop on too-flat CGM data behavior --- lib/determine-basal/determine-basal.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 002a07ea3..a7c546226 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -142,14 +142,14 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ if (minAgo > 12 || minAgo < -5) { // Dexcom data is too old, or way in the future rT.reason = "If current system time "+systemTime+" is correct, then BG data is too old. 
The last BG data was read "+minAgo+"m ago at "+bgTime; // if BG is too old/noisy, or is changing less than 1 mg/dL/5m for 45m, cancel any high temps and shorten any long zero temps - } else if ( bg > 60 && glucose_status == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) { + } else if ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) { if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { rT.reason = "Error: CGM data is unchanged for the past ~45m"; } } - if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { + if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { if (currenttemp.rate > basal) { // high temp is running rT.reason += ". Replacing high temp basal of "+currenttemp.rate+" with neutral temp of "+basal; rT.deliverAt = deliverAt; From b6e831b6085ff1a314adfd13123e7274dacef9b5 Mon Sep 17 00:00:00 2001 From: Scott Date: Sun, 1 Dec 2019 19:52:00 -0800 Subject: [PATCH 07/66] clarify CGM too flat error --- lib/determine-basal/determine-basal.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index a7c546226..2a00236e2 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -146,7 +146,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { - rT.reason = "Error: CGM data is unchanged for the past ~45m"; + rT.reason = "Error: CGM data is unchanged for 5m and too flat for the past ~15m and ~45m"; } } if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { From 209432145ff0659b2355438b49129fd02b38bd95 Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Mon, 2 Dec 2019 22:31:46 -0600 Subject: [PATCH 08/66] fix glucose-noise lastDelta logic error (#1326) --- lib/glucose-stats.js | 4 +++- tests/glucose-noise.js | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/glucose-stats.js b/lib/glucose-stats.js index c89497f4a..71563d628 100644 --- a/lib/glucose-stats.js +++ b/lib/glucose-stats.js @@ -46,7 +46,7 @@ const calcNoise = (sgvArr) => { // sod = sum of distances let sod = 0; - const lastDelta = 0; + let lastDelta = 0; for (let i = 1; i < n; i += 1) { // y2y1Delta adds a multiplier that gives @@ -63,6 +63,8 @@ const calcNoise = (sgvArr) => { y2y1Delta *= 1.2; } + lastDelta = y2y1Delta; + sod += Math.sqrt(Math.pow(x2x1Delta, 2) + Math.pow(y2y1Delta, 2)); } diff --git a/tests/glucose-noise.js b/tests/glucose-noise.js index 
f6f9828fc..b4977c250 100644 --- a/tests/glucose-noise.js +++ b/tests/glucose-noise.js @@ -156,7 +156,7 @@ describe('NOISE', function() { readDateMills: 1528892189592, filtered: 148544, unfiltered: 144256, - glucose: 158, + glucose: 148, trend: -6.002474353316756, }]; From f9c8b6905876ccb86a560dd4879956dc284be570 Mon Sep 17 00:00:00 2001 From: Scott Date: Tue, 3 Dec 2019 19:06:46 -0800 Subject: [PATCH 09/66] flat CGM data isn't really an error, just a reason to dose neutrally --- lib/determine-basal/determine-basal.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 2a00236e2..5e69391f3 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -146,7 +146,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { - rT.reason = "Error: CGM data is unchanged for 5m and too flat for the past ~15m and ~45m"; + rT.reason = "CGM data is unchanged for 5m w/ <1 mg/dL change for the past ~15m and ~45m"; } } if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { From b9f6cf557fa6e7632334b19a813d841701fc4620 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 3 Dec 2019 22:13:58 -0800 Subject: [PATCH 10/66] More comments (#1321) * first round of new comments and reorg notes; remove old comments * more comments * remove unused aci variable * comment as to why we don't use setTempBasal() --- lib/determine-basal/determine-basal.js | 74 ++++++++++++++++---------- 1 file changed, 47 insertions(+), 27 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 2a00236e2..a5e7e416f 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -13,6 +13,7 @@ THE SOFTWARE. */ +// Define various functions used later on, in the main function determine_basal() below var round_basal = require('../round-basal') @@ -111,6 +112,8 @@ function enable_smb( } var determine_basal = function determine_basal(glucose_status, currenttemp, iob_data, profile, autosens_data, meal_data, tempBasalFunctions, microBolusAllowed, reservoir_data, currentTime) { + +// Set variables required for evaluating error conditions var rT = {}; //short for requestedTemp var deliverAt = new Date(); @@ -134,8 +137,12 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var bg = glucose_status.glucose; var noise = glucose_status.noise; + +// Cancel high temps (and replace with neutral) or shorten long zero temps for various error conditions + // 38 is an xDrip error state that usually indicates sensor failure // all other BG values between 11 and 37 mg/dL reflect non-error-code BG values, so we should zero temp for those +// First, print out different explanations for each different error condition if (bg <= 10 || bg === 38 || noise >= 3) { //Dexcom is in ??? mode or calibrating, or xDrip reports high noise rT.reason = "CGM is calibrating, in ??? 
state, or noise is high"; } @@ -149,6 +156,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.reason = "Error: CGM data is unchanged for 5m and too flat for the past ~15m and ~45m"; } } +// Then, for all such error conditions, cancel any running high temp or shorten any long zero temp, and return. if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { if (currenttemp.rate > basal) { // high temp is running rT.reason += ". Replacing high temp basal of "+currenttemp.rate+" with neutral temp of "+basal; @@ -157,6 +165,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.duration = 30; rT.rate = basal; return rT; + // don't use setTempBasal(), as it has logic that allows <120% high temps to continue running //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); } else if ( currenttemp.rate === 0 && currenttemp.duration > 30 ) { //shorten long zero temps to 30m rT.reason += ". Shortening " + currenttemp.duration + "m long zero temp to 30m. "; @@ -165,6 +174,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.duration = 30; rT.rate = 0; return rT; + // don't use setTempBasal(), as it has logic that allows long zero temps to continue running //return tempBasalFunctions.setTempBasal(0, 30, profile, rT, currenttemp); } else { //do nothing. rT.reason += ". Temp " + currenttemp.rate + " <= current basal " + basal + "U/hr; doing nothing. "; @@ -172,6 +182,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } +// Get configured target, and return if unable to do so. +// This should occur after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before using target_bg to adjust sensitivityRatio below. var max_iob = profile.max_iob; // maximum amount of non-bolus IOB OpenAPS will ever deliver // if min and max are set, then set target to their average @@ -191,6 +204,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ return rT; } +// Calculate sensitivityRatio based on temp targets, if applicable, or using the value calculated by autosens var sensitivityRatio; var high_temptarget_raises_sensitivity = profile.exercise_mode || profile.high_temptarget_raises_sensitivity; var normalTarget = 100; // evaluate high/low temptarget against 100, not scheduled target (which might change) @@ -225,6 +239,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } +// Conversely, adjust BG target based on autosens ratio if no temp target is running // adjust min, max, and target BG for sensitivity, such that 50% increase in ISF raises target from 100 to 120 if (profile.temptargetSet) { //process.stderr.write("Temp Target set, not adjusting with autosens; "); @@ -245,6 +260,10 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } +// If iob_data or its required properties are missing, return. +// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before attempting to use iob_data below. 
+// TODO: move this down to be just above // compare currenttemp to iob_data.lastTemp and cancel temp if they don't match if (typeof iob_data === 'undefined' ) { rT.error ='Error: iob_data undefined. '; return rT; @@ -261,6 +280,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ return rT; } +// Prep various delta variables. TODO: make this happen earlier along with other variable prep var tick; if (glucose_status.delta > -0.5) { @@ -273,6 +293,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta); var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); +// Adjust ISF based on sensitivityRatio var profile_sens = round(profile.sens,1) var sens = profile.sens; if (typeof autosens_data !== 'undefined' && autosens_data) { @@ -287,7 +308,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } console.error("; CR:",profile.carb_ratio); - // compare currenttemp to iob_data.lastTemp and cancel temp if they don't match +// Compare currenttemp to iob_data.lastTemp and cancel temp if they don't match, as a safety check +// This should occur after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before returning (doing nothing) below if eventualBG is undefined. var lastTempAge; if (typeof iob_data.lastTemp !== 'undefined' ) { lastTempAge = round(( new Date(systemTime).getTime() - iob_data.lastTemp.date ) / 60000); // in minutes @@ -304,11 +327,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ return tempBasalFunctions.setTempBasal(0, 0, profile, rT, currenttemp); } if ( currenttemp && iob_data.lastTemp && currenttemp.duration > 0 ) { - // TODO: fix this (lastTemp.duration is how long it has run; currenttemp.duration is time left - //if ( currenttemp.duration < iob_data.lastTemp.duration - 2) { - //rT.reason = "Warning: currenttemp duration "+currenttemp.duration+" << lastTemp duration "+round(iob_data.lastTemp.duration,1)+" from pumphistory; setting neutral temp of "+basal+"."; - //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); - //} //console.error(lastTempAge, round(iob_data.lastTemp.duration,1), round(lastTempAge - iob_data.lastTemp.duration,1)); var lastTempEnded = lastTempAge - iob_data.lastTemp.duration if ( lastTempEnded > 5 && lastTempAge > 10 ) { @@ -316,14 +334,11 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //console.error(currenttemp, round(iob_data.lastTemp,1), round(lastTempAge,1)); return tempBasalFunctions.setTempBasal(0, 0, profile, rT, currenttemp); } - // TODO: figure out a way to do this check that doesn't fail across basal schedule boundaries - //if ( tempModulus < 25 && tempModulus > 5 ) { - //rT.reason = "Warning: currenttemp duration "+currenttemp.duration+" + lastTempAge "+lastTempAge+" isn't a multiple of 30m; setting neutral temp of "+basal+"."; - //console.error(rT.reason); - //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); - //} } +// Calculate BGI, deviation, and eventualBG. 
+// This has to happen after we obtain iob_data + //calculate BG impact: the amount BG "should" be rising or falling based on insulin activity alone var bgi = round(( -iob_data.activity * sens * 5 ), 2); // project deviations for 30 minutes @@ -346,7 +361,8 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // and adjust it for the deviation above var eventualBG = naive_eventualBG + deviation; - // raise target for noisy / raw CGM data +// Raise target for noisy / raw CGM data. +// TODO: move this up to immediately after parsing targets above (once adv_target_adjustments is deprecated) if (glucose_status.noise >= 2) { // increase target at least 10% (default 30%) for raw / noisy data var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier ); @@ -360,6 +376,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ target_bg = adjustedTargetBG; max_bg = adjustedMaxBG; // adjust target BG range if configured to bring down high BG faster + // TODO: deprecate this } else if ( bg > max_bg && profile.adv_target_adjustments && ! profile.temptargetSet ) { // with target=100, as BG rises from 100 to 160, adjustedTarget drops from 100 to 80 adjustedMinBG = round(Math.max(80, min_bg - (bg - min_bg)/3 ),0); @@ -389,17 +406,20 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } + // TODO: move this line to be 4 lines down var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi); if (typeof eventualBG === 'undefined' || isNaN(eventualBG)) { rT.error ='Error: could not calculate eventualBG. '; return rT; } + // TODO: move this up to immediately after calculating targets // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85 var threshold = min_bg - 0.5*(min_bg-40); //console.error(reservoir_data); +// Initialize rT (requestedTemp) object. Has to be done after eventualBG is calculated. 
rT = { 'temp': 'absolute' , 'bg': bg @@ -411,8 +431,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ , 'sensitivityRatio' : sensitivityRatio // autosens ratio (fraction of normal basal) }; - // generate predicted future BGs based on IOB, COB, and current absorption rate +// Generate predicted future BGs based on IOB, COB, and current absorption rate +// Initialize and calculate variables used for predicting BGs var COBpredBGs = []; var IOBpredBGs = []; var UAMpredBGs = []; @@ -443,15 +464,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var uci = round((minDelta - bgi),1); // ISF (mg/dL/U) / CR (g/U) = CSF (mg/dL/g) - // TODO: remove commented-out code for old behavior - //if (profile.temptargetSet) { - // if temptargetSet, use unadjusted profile.sens to allow activity mode sensitivityRatio to adjust CR - //var csf = profile.sens / profile.carb_ratio; - //} else { - // otherwise, use autosens-adjusted sens to counteract autosens meal insulin dosing adjustments - // so that autotuned CR is still in effect even when basals and ISF are being adjusted by autosens - //var csf = sens / profile.carb_ratio; - //} // use autosens-adjusted sens to counteract autosens meal insulin dosing adjustments so that // autotuned CR is still in effect even when basals and ISF are being adjusted by TT or autosens // this avoids overdosing insulin for large meals when low temp targets are active @@ -465,7 +477,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ console.error("Limiting carb impact from",ci,"to",maxCI,"mg/dL/5m (",maxCarbAbsorptionRate,"g/h )"); ci = maxCI; } - var remainingCATimeMin = 3; // h; duration of expected not-yet-observed carb absorption + var remainingCATimeMin = 3; // h; minimum duration of expected not-yet-observed carb absorption // adjust remainingCATime (instead of CR) for autosens if sensitivityRatio defined if (sensitivityRatio){ remainingCATimeMin = remainingCATimeMin / sensitivityRatio; @@ -482,6 +494,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //console.error(meal_data.lastCarbTime, lastCarbAge); var fractionCOBAbsorbed = ( meal_data.carbs - meal_data.mealCOB ) / meal_data.carbs; + // if the lastCarbTime was 1h ago, increase remainingCATime by 1.5 hours remainingCATime = remainingCATimeMin + 1.5 * lastCarbAge/60; remainingCATime = round(remainingCATime,1); //console.error(fractionCOBAbsorbed, remainingCATimeAdjustment, remainingCATime) @@ -515,7 +528,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var slopeFromDeviations = Math.min(slopeFromMaxDeviation,-slopeFromMinDeviation/3); //console.error(slopeFromMaxDeviation); - var aci = 10; //5m data points = g * (1U/10g) * (40mg/dL/1U) / (mg/dL/5m) // duration (in 5m data points) = COB (g) * CSF (mg/dL/g) / ci (mg/dL/5m) // limit cid to remainingCATime hours: the reset goes to remainingCI @@ -527,7 +539,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } // duration (hours) = duration (5m) * 5 / 60 * 2 (to account for linear decay) console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (~2h peak):",round(remainingCIpeak,1),"mg/dL per 5m"); - //console.error("Accel. 
Carb Impact:",aci,"mg/dL per 5m; ACI Duration:",round(aci*5/60*2,1),"hours"); + var minIOBPredBG = 999; var minCOBPredBG = 999; var minUAMPredBG = 999; @@ -542,7 +554,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var maxIOBPredBG = bg; var maxCOBPredBG = bg; var maxUAMPredBG = bg; - //var maxPredBG = bg; var eventualPredBG = bg; var lastIOBpredBG; var lastCOBpredBG; @@ -767,6 +778,8 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // make sure minPredBG isn't higher than avgPredBG minPredBG = Math.min( minPredBG, avgPredBG ); +// Print summary variables based on predBGs etc. + process.stderr.write("minPredBG: "+minPredBG+" minIOBPredBG: "+minIOBPredBG+" minZTGuardBG: "+minZTGuardBG); if (minCOBPredBG < 999) { process.stderr.write(" minCOBPredBG: "+minCOBPredBG); @@ -791,6 +804,8 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.reason += ", UAMpredBG " + convert_bg(lastUAMpredBG, profile) } rT.reason += "; "; + +// Use minGuardBG to prevent overdosing in hypo-risk situations // use naive_eventualBG if above 40, but switch to minGuardBG if both eventualBGs hit floor of 39 var carbsReqBG = naive_eventualBG; if ( carbsReqBG < 40 ) { @@ -837,12 +852,14 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //rT.reason += "minGuardBG "+minGuardBG+"<"+threshold+": SMB disabled; "; enableSMB = false; } +// Disable SMB for sudden rises (often caused by calibrations or activation/deactivation of Dexcom's noise-filtering algorithm) if ( maxDelta > 0.20 * bg ) { console.error("maxDelta",convert_bg(maxDelta, profile),"> 20% of BG",convert_bg(bg, profile),"- disabling SMB"); rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > 20% of BG "+convert_bg(bg, profile)+": SMB disabled; "; enableSMB = false; } +// Calculate carbsReq (carbs required to avoid a hypo) console.error("BG projected to remain above",convert_bg(min_bg, profile),"for",minutesAboveMinBG,"minutes"); if ( minutesAboveThreshold < 240 || minutesAboveMinBG < 60 ) { console.error("BG projected to remain above",convert_bg(threshold,profile),"for",minutesAboveThreshold,"minutes"); @@ -862,6 +879,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.carbsReq = carbsReq; rT.reason += carbsReq + " add'l carbs req w/in " + minutesAboveThreshold + "m; "; } + +// Begin core dosing logic: check for situations requiring low or high temps, and return appropriate temp after first match + // don't low glucose suspend if IOB is already super negative and BG is rising faster than predicted if (bg < threshold && iob_data.iob < -profile.current_basal*20/60 && minDelta > 0 && minDelta > expectedDelta) { rT.reason += "IOB "+iob_data.iob+" < " + round(-profile.current_basal*20/60,2); From f14f6d86a7d1a90bef82b2c753251f919e54f16e Mon Sep 17 00:00:00 2001 From: Jakob Sandberg Date: Wed, 4 Dec 2019 09:15:34 -0800 Subject: [PATCH 11/66] add test to ensure we correctly fall back to dateString property --- tests/get-last-glucose.test.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/get-last-glucose.test.js b/tests/get-last-glucose.test.js index d1a7430bd..91a5167de 100644 --- a/tests/get-last-glucose.test.js +++ b/tests/get-last-glucose.test.js @@ -30,4 +30,8 @@ describe('getLastGlucose', function ( ) { glucose_status.short_avgdelta.should.equal(4.44); glucose_status.long_avgdelta.should.equal(2.86); }); + it('should fall back to dateString property', function () { + var 
glucose_status = getLastGlucose([{dateString: "2019-12-04T08:54:19.288-0800", sgv: 100}, {date: 1467942544500, sgv: 95}, {date: 1467942244000, sgv: 85}, {date: 1467941944000, sgv: 70}]); + glucose_status.date.should.equal(1575478459288); + }); }); From 87c5813510e019cf35e5c636103a21e7b6754847 Mon Sep 17 00:00:00 2001 From: Jakob Sandberg Date: Wed, 4 Dec 2019 09:16:45 -0800 Subject: [PATCH 12/66] fix bug by using correct glucose object when parsing current date --- lib/glucose-get-last.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index 0eb8909ae..a9a136ccb 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -6,7 +6,7 @@ var getLastGlucose = function (data) { }); var now = data[0]; - var now_date = now.date || Date.parse(now.display_time) || Date.parse(then.dateString); + var now_date = now.date || Date.parse(now.display_time) || Date.parse(now.dateString); var change; var last_deltas = []; var short_deltas = []; From b32880a6e139eac203222248160a0ec82940acf8 Mon Sep 17 00:00:00 2001 From: Jakob Sandberg Date: Wed, 4 Dec 2019 09:19:40 -0800 Subject: [PATCH 13/66] encapsulate logic for accessing entry date into a single function --- lib/glucose-get-last.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index a9a136ccb..afa6d39e1 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -1,3 +1,7 @@ +function getDateFromEntry(entry) { + return entry.date || Date.parse(entry.display_time) || Date.parse(entry.dateString); +} + var getLastGlucose = function (data) { data = data.map(function prepGlucose (obj) { //Support the NS sgv field to avoid having to convert in a custom way @@ -6,7 +10,7 @@ var getLastGlucose = function (data) { }); var now = data[0]; - var now_date = now.date || Date.parse(now.display_time) || Date.parse(now.dateString); + var now_date = getDateFromEntry(now); var change; var last_deltas = []; var short_deltas = []; @@ -23,7 +27,7 @@ var getLastGlucose = function (data) { // only use data from the same device as the most recent BG data point if (typeof data[i] !== 'undefined' && data[i].glucose > 38 && data[i].device === now.device) { var then = data[i]; - var then_date = then.date || Date.parse(then.display_time) || Date.parse(then.dateString); + var then_date = getDateFromEntry(then); var avgdelta = 0; var minutesago; if (typeof then_date !== 'undefined' && typeof now_date !== 'undefined') { From e9f6922ca2a84fad404b77410de655b516af92f7 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Thu, 5 Dec 2019 19:32:34 -0800 Subject: [PATCH 14/66] Mraa ccprog install (#1322) * new script to install mraa+ccprog on oref0 dev * comment space * typofix * git pull to update ccprog if it already exists * cd to absolute path for ccprog * install ccprog along with mraa * Delete bin/mraa-ccprog-install.sh in favor of oref0-mraa-install from #1302 --- bin/oref0-mraa-install.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bin/oref0-mraa-install.sh b/bin/oref0-mraa-install.sh index 037ad9421..372c32114 100755 --- a/bin/oref0-mraa-install.sh +++ b/bin/oref0-mraa-install.sh @@ -10,3 +10,12 @@ mkdir -p mraa/build && cd mraa/build && cmake .. -DBUILDSWIGNODE=OFF -DCMAKE_INS echo "Running ldconfig..." bash -c "grep -q i386-linux-gnu /etc/ld.so.conf || echo /usr/local/lib/i386-linux-gnu/ >> /etc/ld.so.conf && ldconfig" echo "MRAA installed. Please reboot before using." 
+ +mkdir -p ~/src +if [ -d "$HOME/src/ccprog/" ]; then + echo "$HOME/src/ccprog/ already exists; updating" + cd $HOME/src/ccprog/ && git pull || echo "Could not git pull ccprog" +else + cd ~/src && git clone https://github.com/ps2/ccprog.git || echo "Could not clone ccprog" +fi +cd $HOME/src/ccprog/ && make ccprog || echo "Could not make ccprog" From d0d8a8407023bf2f214842c52bd3b7b3759dffff Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Thu, 5 Dec 2019 19:33:10 -0800 Subject: [PATCH 15/66] update too-flat language (#1327) * flat CGM data isn't really an error, just a reason to dose neutrally * add actual delta and avgdelta values to unchanged reason * include current BG too --- lib/determine-basal/determine-basal.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index a5e7e416f..34633f890 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -153,7 +153,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { - rT.reason = "Error: CGM data is unchanged for 5m and too flat for the past ~15m and ~45m"; + rT.reason = "CGM data is unchanged ("+bg+"+"+glucose_status.delta+") for 5m w/ "+glucose_status.short_avgdelta+" mg/dL ~15m change & "+glucose_status.long_avgdelta+" mg/dL ~45m change"; } } // Then, for all such error conditions, cancel any running high temp or shorten any long zero temp, and return. From 8f4845ca8f58a0df22f51be4635f74961c758c58 Mon Sep 17 00:00:00 2001 From: Jakob Date: Sun, 8 Dec 2019 23:09:01 -0800 Subject: [PATCH 16/66] skip meter bg when getting last glucose (#1328) * Add test for meter bg values when getting last glucose * skip meter bg when getting last glucose --- lib/glucose-get-last.js | 4 +++- tests/get-last-glucose.test.js | 8 ++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index 0eb8909ae..8abf224fa 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -1,5 +1,7 @@ var getLastGlucose = function (data) { - data = data.map(function prepGlucose (obj) { + data = data.filter(function(obj) { + return obj.glucose || obj.sgv; + }).map(function prepGlucose (obj) { //Support the NS sgv field to avoid having to convert in a custom way obj.glucose = obj.glucose || obj.sgv; return obj; diff --git a/tests/get-last-glucose.test.js b/tests/get-last-glucose.test.js index d1a7430bd..c782c0d57 100644 --- a/tests/get-last-glucose.test.js +++ b/tests/get-last-glucose.test.js @@ -30,4 +30,12 @@ describe('getLastGlucose', function ( ) { glucose_status.short_avgdelta.should.equal(4.44); glucose_status.long_avgdelta.should.equal(2.86); }); + it('should skip meter BG', function () { + var glucose_status = getLastGlucose([{date: 1467942845000, glucose: null, mbg: 100}, {date: 1467942544500, sgv: 95}, {date: 1467942244000, sgv: 85}, {date: 1467941944000, sgv: 70}]); + //console.log(glucose_status); + glucose_status.delta.should.equal(10); + glucose_status.glucose.should.equal(95); + glucose_status.short_avgdelta.should.equal(11.25); + glucose_status.long_avgdelta.should.equal(0); + }); }); From 9d4c73960d72327a63d6184b52c2fdf2e89f353b Mon Sep 17 00:00:00 2001 From: Scott Date: Mon, 9 Dec 2019 18:01:07 -0800 Subject: [PATCH 17/66] upgrade setuptools to avoid install_requires error per #1303 --- bin/openaps-packages.sh 
| 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/openaps-packages.sh b/bin/openaps-packages.sh index f605d56c8..47046e955 100755 --- a/bin/openaps-packages.sh +++ b/bin/openaps-packages.sh @@ -33,6 +33,8 @@ if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' &> /dev/null ; then ##sudo apt-get install gcc g++ make fi +# upgrade setuptools to avoid "'install_requires' must be a string" error +sudo pip install setuptools -U # no need to die if this fails sudo pip install -U openaps || die "Couldn't install openaps toolkit" sudo pip install -U openaps-contrib || die "Couldn't install openaps-contrib" sudo openaps-install-udev-rules || die "Couldn't run openaps-install-udev-rules" From f841fa705518ef1330a0c29874a4921a00f81090 Mon Sep 17 00:00:00 2001 From: Scott Date: Mon, 27 May 2019 20:27:32 -0700 Subject: [PATCH 18/66] set up xdrip-js with DEXCOM_CGM_TX_ID --- bin/oref0-setup.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 8be43304c..66bfd3114 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -992,6 +992,7 @@ if prompt_yn "" N; then cd $HOME/src/Logger sudo apt-get install -y bluez-tools sudo npm run global-install + cgm-transmitter $DEXCOM_CGM_TX_ID touch /tmp/reboot-required fi From 303833cf9952fe1e4777f18ffb27d267dfcf7627 Mon Sep 17 00:00:00 2001 From: Jens Heuschkel Date: Sun, 15 Dec 2019 22:52:13 +0100 Subject: [PATCH 19/66] Add a shutdown function for pi based rigs (#1332) * Add a shutdown function for pi based rigs Added code to shutdown a pi based rig when battery level is low. I tested this code on my pi rig with explorer HAT several times using my lab bench power supply and one time with a standard 18650 cell. * Changed shutdown from +5 to now --- bin/oref0-cron-every-15min.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/bin/oref0-cron-every-15min.sh b/bin/oref0-cron-every-15min.sh index d4ed8c850..ea79a3405 100755 --- a/bin/oref0-cron-every-15min.sh +++ b/bin/oref0-cron-every-15min.sh @@ -15,6 +15,11 @@ if is_edison; then sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage | awk '{if ($1<=3050)system("sudo shutdown -h now")}' & fi +# proper shutdown of pi rigs once the battery level is below 2 % (should be more than enough to shut down on a standard 18600 ~2Ah cell) +if is_pi; then + sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery | awk '{if ($1<2)system("sudo shutdown -h now")}' & +fi + # temporarily disable hotspot for 1m every 15m to allow it to try to connect via wifi again ( touch /tmp/disable_hotspot From c01674b97406135caf0d936440eee85c08fd8751 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Mon, 23 Dec 2019 23:29:28 +0200 Subject: [PATCH 20/66] Ping to default gateways (per network) to see if local connection is alive. (#1339) This helps to distinguish between the case that pi does not have connection to the phone, and the case that connection to the phone exists but phone has no external IP connection. Also in the case that both WiFi and BT tethering is enabled one can see which of them is used (the one with the lower metric). 
Here is an example to the output (when both WiFi and BT are enabled.): At Mon 23 Dec 2019 04:50:08 PM IST my local Bluetooth IP is: 192.168.44.226 At Mon 23 Dec 2019 04:50:10 PM IST ping to default gateway 192.168.44.1 (bnep0 metric = 0) passed At Mon 23 Dec 2019 04:50:10 PM IST ping to default gateway 192.168.44.1 (bnep0 metric = 214) passed At Mon 23 Dec 2019 04:50:11 PM IST ping to default gateway 192.168.3.1 (wlan0 metric = 302) passed At Mon 23 Dec 2019 04:50:11 PM IST my public IP is: 37.26.149.235 Signed-off-by: Tzachi Dar --- bin/oref0-online.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/bin/oref0-online.sh b/bin/oref0-online.sh index e21f4a0f3..bf64a24d9 100755 --- a/bin/oref0-online.sh +++ b/bin/oref0-online.sh @@ -43,6 +43,7 @@ main() { else echo "At $(date) my Bluetooth PAN is not connected" fi + ping_default_gateways echo -n "At $(date) my public IP is: " if check_ip; then stop_hotspot @@ -145,6 +146,31 @@ function check_ip { fi } +# network_name ip metric +function ping_to_default_gw { +ping $2 -c 1 > /dev/null + if [[ $? == 0 ]] ; then + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' passed ; + else + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' failed ; + fi +} + +function ping_default_gateways { +# Here is an example to the output of the netstat command that we parse. +# route -n +# Kernel IP routing table +# Destination Gateway Genmask Flags Metric Ref Use Iface +# 0.0.0.0 192.168.44.1 0.0.0.0 UG 0 0 0 bnep0 +# 0.0.0.0 192.168.44.1 0.0.0.0 UG 214 0 0 bnep0 +# 0.0.0.0 192.168.3.1 0.0.0.0 UG 302 0 0 wlan0 +# 192.168.3.0 0.0.0.0 255.255.255.0 U 302 0 0 wlan0 +# 192.168.44.0 0.0.0.0 255.255.255.0 U 214 0 0 bnep0 +route -n | grep ^0.0.0.0 |awk '{print $8 " " $2 " " $5}'| uniq | while read -r line ; do + ping_to_default_gw $line +done +} + function has_ip { ifconfig | grep -A1 $1 | grep -q "inet " } From 2808a1465ce6d726d96e4ee528c253b466e118ed Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Sun, 29 Dec 2019 00:06:48 +0200 Subject: [PATCH 21/66] Kill ns-loop if it lasts for more than 10 minutes. 
(#1340) --- bin/oref0-cron-every-minute.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh index 48a38c10c..d2dcbcdf3 100755 --- a/bin/oref0-cron-every-minute.sh +++ b/bin/oref0-cron-every-minute.sh @@ -49,6 +49,7 @@ sudo wpa_cli -i wlan0 scan & killall-g oref0-pump-loop 1800 killall -g --older-than 30m openaps-report killall-g oref0-g4-loop 600 + killall-g oref0-ns-loop 600 ) & # kill pump-loop after 5 minutes of not writing to pump-loop.log From 95443cb070ce234a77fa950bc6617a76efa70f98 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 31 Dec 2019 14:06:39 -0800 Subject: [PATCH 22/66] simple oref0-simulator (#1325) * there is no inputs.retrospective, so always default to 24h before last glucose * move variables up from rT.reason to rT * simple oref0-simulator * apply deviations to glucose.json if suggested.json invalid * write to log.json; only keep/process ~400 glucose and pumphistory records * only add UAM to ISF if ISFLength < 10 * print 5m non-average deviation and BG * chmod +x * make functions * unchanging BG of 39 is legit (LOW) * add support for carb entries * Get profile from NS in format OpenAPS understands Still WIP * Deal with `sens` not having `timeAsSeconds` * Remove debug print * Deal with carbration not having timeAsSeconds * More defence against lack of timeAsSeconds * Refactor to add normalize_entry function For now it just makes certain timeAsSeconds is present. Move all such normalization to use this function. * Move more common operations to normalize_entry * Reformat by Black, no code changes * Add timezone information to generated profile * Add autosens limits to generated profile Without it strange things can happen, like autotune trying to change ISF from 56 to 700 * Deal better with Profile Switch not having valid profile * Provide hint on how to specify profile name * Add ability to write profile files directly * Deal with profile files not existing yet * Don't use very new python features * Deal with entries not having 'time' field * Normalize all timed entries * Try to support AAPS with local profile * whitespace * define delta before use * update determine-basal arguments * advance clock when suggested.json is invalid * don't advance clock twice * use consistent timezone format * calculate glucose stats * rm *.json on init; default dev, noise, and carbs to zero * default noise to 10 to make something interesting happen * add oref0-simulator and oref0-backtest * copy all examples recursively * pumpprofile.json for running autotune * wrapper script for backtesting with oref0-simulator * another TODO * TODO * download preferences.json from Nightscout devicestatus.json endpoint * comment out Texttable dependency stuff for now and output valid json to stdout * download profile.json from Nightscout profile.json endpoint * support real entries not on 5m boundaries * download historical glucose data from Nightscout entries.json for the day prior * default bolusIncrement to 0.1 is profile.bolus_increment isn't defined (for simulator) * only use simulated results for interim stats * download actual glucose data & compare stats; fix sens/profile * print stats for actuals while running * more compact stats * output cleanup * allow oref0-simulator to run retrospective autosens * download ns-treatments.json for initial autosens * comment out more Texttable() dependencies * use python-2.7 compatible IOError instead of FileNotFoundError * keep and use all-glucose.json values for easy stats * ignore CGM 
error readings * pass device for fakecgm simulator detection * bypass CGM data is unchanged (tooflat) detection when in simulator mode * don't run stats twice at end * support specifying DIR, and default to datestamped /tmp/oref0-simulator dir * TODO * read a --preferences override file for testing different preferences * remove debugging * log backtest and autotune commands * tee -a to append * basic file mode support * use mydate for OSX compatibiilty * use mydate for OSX compatibiilty * use reduce to merge, for jq 1.5 compatibility * escape % for OSX printf compatibility * escape % for OSX printf compatibility * preferences isn't nested, so a simple add will suffice * support autosens-override.json * support --autosens-override * support --profile and --preferences in autotunelog file mode Co-authored-by: viq --- bin/get_profile.py | 434 +++++++++++++++++++++++++ bin/glucose-stats.awk | 18 + bin/oref0-backtest.sh | 265 +++++++++++++++ bin/oref0-detect-sensitivity.js | 9 +- bin/oref0-simulator.sh | 162 +++++++++ examples/glucose.json | 12 +- examples/settings/pumpprofile.json | 81 +++++ lib/autotune-prep/categorize.js | 9 +- lib/autotune/index.js | 2 +- lib/determine-basal/determine-basal.js | 28 +- lib/glucose-get-last.js | 1 + package.json | 2 + 12 files changed, 1008 insertions(+), 15 deletions(-) create mode 100755 bin/get_profile.py create mode 100644 bin/glucose-stats.awk create mode 100755 bin/oref0-backtest.sh create mode 100755 bin/oref0-simulator.sh create mode 100644 examples/settings/pumpprofile.json diff --git a/bin/get_profile.py b/bin/get_profile.py new file mode 100755 index 000000000..2f3276d7d --- /dev/null +++ b/bin/get_profile.py @@ -0,0 +1,434 @@ +#!/usr/bin/env python +""" +Module to ease work with nightscout profiles. +By default lists all profiles found, and supports following sub-commands: +* profiles - list defined profiles +* display - display named (or default) profile + (in nightscout or OpenAPS format) +* write - write to disk profile in OpenAPS format + +Bunch of things inspired by https://github.com/MarkMpn/AutotuneWeb/ +""" + +# Make it work on both python 2 and 3 +# Probably a bit wide, but I'm still learning +from __future__ import absolute_import, with_statement, print_function, unicode_literals + +# Built-in modules +import argparse +from datetime import datetime +import json +import os.path +import logging +import sys + +# External modules +import requests +#from texttable import Texttable + +# logging.basicConfig(level=logging.INFO) +logging.basicConfig(level=logging.DEBUG) + +PROFILE_FILES = ['autotune.json', 'profile.json', 'pumpprofile.json'] +PROFILE_KEYS = [ + 'autosens_max', 'autosens_min', 'basalprofile', 'bg_targets', 'carb_ratio', + 'carb_ratios', 'dia', 'isfProfile', 'min_5m_carbimpact', 'timezone' +] +TIMED_ENTRIES = ['carbratio', 'sens', 'basal', 'target_low', 'target_high'] + + +def get_profiles(nightscout, token): + """ + Get profiles available in nightscout + """ + r_url = nightscout + "/api/v1/profile.json" + if token is not None: + r_url = r_url + "?" + token + r = requests.get(r_url) + return r.json() + + +def get_current_profile(nightscout, token, profile_name): + """ + Try to get the active profile + """ + r_url = nightscout + "/api/v1/profile.json" + if token is not None: + r_url = r_url + "?" 
+ token + p_list = requests.get(r_url).json() + logging.debug("profile list: %s", p_list) + default_profile = p_list[0]["defaultProfile"] + if profile_name is None: + p_url = ( + nightscout + + "/api/v1/treatments.json?find[eventType][$eq]=Profile Switch&count=1" + ) + if token is not None: + p_url = p_url + "?" + token + p_switch = requests.get(p_url).json() + logging.debug("p_switch: %s", p_switch) + if p_switch: + try: + sw_prof = json.loads(p_switch[0]["profileJson"]) + logging.debug("sw_prof: %s", sw_prof) + if sw_prof: + profile = sw_prof + profile["name"] = p_switch[0]["profile"] + if profile["timezone"] is not None: + return profile + else: + profile["timezone"] = default_profile["timezone"] + return profile + except KeyError: + logging.debug("default profile: %s", default_profile) + profile["timezone"] = p_list[0]["store"][default_profile]["timezone"] + return profile +# sys.exit( +# """Latest 'Profile Switch' event doesn't contain profile, """ + +# """please specify profile name to use with --name flag.""") + p_list[0]["store"][default_profile]["name"] = default_profile + try: + if not p_list[0]["store"][default_profile]["units"]: + p_list[0]["store"][default_profile]["units"] = p_list[0][ + "units"] + except KeyError: + p_list[0]["store"][profile_name]["units"] = p_list[0]["units"] + return p_list[0]["store"][default_profile] + p_list[0]["store"][profile_name]["name"] = profile_name + try: + if not p_list[0]["store"][profile_name]["units"]: + p_list[0]["store"][profile_name]["units"] = p_list[0]["units"] + except KeyError: + p_list[0]["store"][profile_name]["units"] = p_list[0]["units"] + return p_list[0]["store"][profile_name] + + +def profiles(nightscout, token): + """ + print list of profiles available in nightscout + """ + p_list = get_profiles(nightscout, token) + default_profile = p_list[0]["defaultProfile"] + profile_list = p_list[0]["store"].keys() + print("Default profile: {}".format(default_profile)) + print("Available profiles:") + for profile in profile_list: + print("\t" + profile) + + +def display(nightscout, token, profile_name, profile_format): + """ + Display contents of a profile, in requested format + """ + profile = get_current_profile(nightscout, token, profile_name) + if profile_format == "nightscout": + # display_nightscout(p_list, profile_name) + logging.debug("Displaying profile {}".format(profile["name"])) + print(json.dumps(profile, indent=4)) + elif profile_format == "text": + display_text(profile) + else: + print(json.dumps(ns_to_oaps(profile), indent=4)) + + +def write(nightscout, token, profile_name, directory): + """ + Write profile in OpenAPS format to a directory + """ + profile = ns_to_oaps(get_current_profile(nightscout, token, profile_name)) + logging.debug("Checking for directory: %s", directory) + if not os.path.isdir(directory): + sys.exit( + "Please provide an existing directory to write profile files to") + # Check whether there's already a profile file with settings we don't have + for profile_file in PROFILE_FILES: + try: + with open(os.path.join(directory, profile_file), 'r') as p: + old_profile = json.loads(p.read()) + for key in old_profile.keys(): + logging.debug("Checking key %s from profile file %s", key, + profile_file) + if key not in PROFILE_KEYS: + logging.error( + "Existing profile file %s contains key %s we wouldn't set!", + profile_file, key) + sys.exit( + "Existing profile contains a key we wouldn't set!") + except IOError: + pass + for profile_file in PROFILE_FILES: + with open(os.path.join(directory, profile_file), 'w') 
as f: + f.write(json.dumps(profile, indent=4)) + + +def normalize_entry(entry): + """ + Clean up an entry before further processing + """ + try: + if entry["timeAsSeconds"]: + pass + except KeyError: + entry_time = datetime.strptime(entry["time"], "%H:%M") + entry[ + "timeAsSeconds"] = 3600 * entry_time.hour + 60 * entry_time.minute + try: + if entry["time"]: + pass + except KeyError: + entry_hour = int(entry['timeAsSeconds'] / 3600) + entry_minute = int(entry['timeAsSeconds'] % 60) + entry["time"] = str(entry_hour).rjust( + 2, '0') + ":" + str(entry_minute).rjust(2, '0') + + entry["start"] = entry["time"] + ":00" + entry["minutes"] = int(entry["timeAsSeconds"]) / 60 + return entry + + +def ns_to_oaps(ns_profile): + """ + Convert nightscout profile to OpenAPS format + """ + oaps_profile = {} + # XXX If addint any new entries, make sure to update PROFILE_KEYS at the top + # Not represented in nightscout + oaps_profile["min_5m_carbimpact"] = 8.0 + oaps_profile["autosens_min"] = 0.7 + oaps_profile["autosens_max"] = 1.2 + oaps_profile["dia"] = float(ns_profile["dia"]) + oaps_profile["timezone"] = ns_profile["timezone"] + + # Create a list of dicts with basal profile + oaps_profile["basalprofile"] = [] + for entry_type in TIMED_ENTRIES: + for entry in ns_profile[entry_type]: + normalize_entry(entry) + for basal_item in ns_profile["basal"]: + oaps_profile["basalprofile"].append({ + "i": + len(oaps_profile["basalprofile"]), + "minutes": + basal_item["minutes"], + "start": + basal_item["start"], + "rate": + float(basal_item["value"]), + }) + + # Create a dict of dicts with target levels + oaps_profile["bg_targets"] = { + "units": ns_profile["units"], + "user_preferred_units": ns_profile["units"], + "targets": [], + } + targets = {} + for low in ns_profile["target_low"]: + low = normalize_entry(low) + targets.setdefault(low["time"], {}) + targets[low["time"]]["low"] = { + "i": len(targets), + "start": low["start"], + "offset": float(low["timeAsSeconds"]), + "low": float(low["value"]), + } + for high in ns_profile["target_high"]: + high = normalize_entry(high) + targets.setdefault(high["time"], {}) + targets[high["time"]]["high"] = {"high": float(high["value"])} + for time in sorted(targets.keys()): + oaps_profile["bg_targets"]["targets"].append({ + "i": + len(oaps_profile["bg_targets"]["targets"]), + "start": + targets[time]["low"]["start"], + "offset": + targets[time]["low"]["offset"], + "low": + targets[time]["low"]["low"], + "min_bg": + targets[time]["low"]["low"], + "high": + targets[time]["high"]["high"], + "max_bg": + targets[time]["high"]["high"], + }) + + # Create a dics of dicts with insulin sensitivity profile + oaps_profile["isfProfile"] = {"first": 1, "sensitivities": []} + isf_p = {} + for sens in ns_profile["sens"]: + sens = normalize_entry(sens) + isf_p.setdefault(sens["time"], {}) + isf_p[sens["time"]] = { + "sensitivity": float(sens["value"]), + "start": sens["start"], + "offset": sens["minutes"], + } + for time in sorted(isf_p.keys()): + oaps_profile["isfProfile"]["sensitivities"].append({ + "i": + len(oaps_profile["isfProfile"]["sensitivities"]), + "sensitivity": + isf_p[time]["sensitivity"], + "offset": + isf_p[time]["offset"], + "start": + isf_p[time]["start"], + }) + + # Create a dict of dicts for carb ratio + oaps_profile["carb_ratios"] = { + "first": 1, + "units": "grams", + "schedule": [] + } + cr_p = {} + for cr in ns_profile["carbratio"]: + cr = normalize_entry(cr) + cr_p.setdefault(cr["time"], {}) + cr_p[cr["time"]] = { + "start": cr["start"], + "offset": cr["minutes"], 
+ "ratio": float(cr["value"]), + } + for time in sorted(cr_p.keys()): + oaps_profile["carb_ratios"]["schedule"].append({ + "i": + len(oaps_profile["carb_ratios"]["schedule"]), + "start": + cr_p[time]["start"], + "offset": + cr_p[time]["offset"], + "ratio": + cr_p[time]["ratio"], + }) + oaps_profile["carb_ratio"] = oaps_profile["carb_ratios"]["schedule"][0][ + "ratio"] + + sorted_profile = {} + for key in sorted(oaps_profile.keys()): + sorted_profile[key] = oaps_profile[key] + + # return oaps_profile + return sorted_profile + + +def display_nightscout(profile_data, profile_name): + """ + Display profile the way it comes from nightscout + """ + print("Displaying profile {}".format(profile_name)) + print(json.dumps(profile_data[0]["store"][profile_name], indent=4)) + + +def display_text(p_data): + """ + Display profile in text format + """ + # p_data = profile_data[0]["store"][profile_name] + logging.debug("Data keys: %s", p_data.keys()) + + # Single value data + #singletons = Texttable() + #singletons.set_deco(Texttable.HEADER) + #singletons.set_cols_align(["c", "c", "c", "c"]) + #singletons.add_rows([ + #["Profile name", "Timezone", "Units", "DIA"], + #[ + #p_data["name"], + #p_data["timezone"], + #p_data["units"], + #p_data["dia"] + #], + #]) + #print(singletons.draw() + "\n") + + times = {} + tgt_low = {v["time"]: v["value"] for v in p_data["target_low"]} + tgt_high = {v["time"]: v["value"] for v in p_data["target_high"]} + carb_ratio = {v["time"]: v["value"] for v in p_data["carbratio"]} + sens = {v["time"]: v["value"] for v in p_data["sens"]} + basal = {v["time"]: v["value"] for v in p_data["basal"]} + logging.debug(tgt_high, tgt_low, carb_ratio, sens, basal) + for (time, basal) in basal.items(): + times.setdefault(time, {}) + times[time]["basal"] = basal + for (time, sens) in sens.items(): + times.setdefault(time, {}) + times[time]["sens"] = sens + for (time, c_r) in carb_ratio.items(): + times.setdefault(time, {}) + times[time]["carbratio"] = c_r + for (time, tgt_h) in tgt_high.items(): + times.setdefault(time, {}) + times[time]["tgt_high"] = tgt_h + for (time, tgt_l) in tgt_low.items(): + times.setdefault(time, {}) + times[time]["tgt_low"] = tgt_l + logging.debug("Times: %s", times) + + times_list = [["Time", "Basal", "ISF", "CR", "Target Low", "Target High"]] + for time in sorted(times.keys()): + times_list.append([ + time, + times[time].get("basal", ""), + times[time].get("sens", ""), + times[time].get("carbratio", ""), + times[time].get("tgt_low", ""), + times[time].get("tgt_high", ""), + ]) + # times_table = Texttable() + #times_table.set_cols_align(["c", "c", "c", "c", "c", "c"]) + #times_table.add_rows(times_list) + #print(times_table.draw() + "\n") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Get nightscout profile.") + parser.add_argument( + "--nightscout", + help="Nightscout URL", + required=True, + nargs="?", + const="http://127.0.0.1:1337", + default="http://127.0.0.1:1337", + ) + parser.add_argument("--token", help="Authenticaton token") + + subparsers = parser.add_subparsers(help="Sub-command to run", + dest="subparser") + + parser_profiles = subparsers.add_parser("profiles", + help="List all profile names") + + parser_display = subparsers.add_parser("display", help="Display a profile") + parser_display.add_argument("--name", + help="Which profile to display", + nargs="?", + dest="profile_name") + parser_display.add_argument( + "--format", + default="nightscout", + nargs="?", + dest="profile_format", + choices=["nightscout", "openaps", 
"text"], + help="What format to display profile in", + ) + + parser_write = subparsers.add_parser( + "write", help="Write profile in OpenAPS format to a directory") + parser_write.add_argument("--directory", + help="What directory to write files to", + required=True) + parser_write.add_argument("--name", + help="Which profile to display", + nargs="?", + dest="profile_name") + + logging.debug(vars(parser.parse_args())) + + # https://stackoverflow.com/questions/4575747/get-selected-subcommand-with-argparse/44948406#44948406 + # I have no idea what it does, but it seems to do the trick + kwargs = vars(parser.parse_args()) + globals()[kwargs.pop("subparser")](**kwargs) diff --git a/bin/glucose-stats.awk b/bin/glucose-stats.awk new file mode 100644 index 000000000..f5a8a90f6 --- /dev/null +++ b/bin/glucose-stats.awk @@ -0,0 +1,18 @@ +BEGIN { + min=1000 + if (!min_bg) { min_bg=70 } + if (!max_bg) { max_bg=180 } +} +{ sum+=$1; count++ } +($1 < 39) { next } +($1 < min) { min=$1 } +($1 > max) { max=$1 } +($1 <= max_bg && $1 >= min_bg) { inrange++ } +($1 > max_bg) { high++ } +($1 < min_bg) { low++ } +END { print "Count: " count; + printf "Min / Max / Average: %.0f / %.0f / %.1f\n", min, max, sum/count + printf "%%TIR / high / low (%.0f-%.0f): ", min_bg, max_bg + #print "%TIR / high / low (" min_bg "-" max_bg "): " \ + printf "%.1f%% / %.1f%% / %.1f%%\n", inrange/(high+inrange+low)*100, high/(high+inrange+low)*100, low/(high+inrange+low)*100 +} diff --git a/bin/oref0-backtest.sh b/bin/oref0-backtest.sh new file mode 100755 index 000000000..b32097008 --- /dev/null +++ b/bin/oref0-backtest.sh @@ -0,0 +1,265 @@ +#!/bin/bash + +# usage: $0 + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +function stats { + echo Simulated: + cat all-glucose.json | jq '.[] | select (.device=="fakecgm") | .sgv' | awk -f ~/src/oref0/bin/glucose-stats.awk + echo Actual: + cat ns-entries.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk +} + +# defaults +DIR="/tmp/oref0-simulator.$(mydate +%s)" +NIGHTSCOUT_HOST="" +START_DATE="" +END_DATE="" +START_DAYS_AGO=1 # Default to yesterday if not otherwise specified +END_DAYS_AGO=1 # Default to yesterday if not otherwise specified +UNKNOWN_OPTION="" + + +# handle input arguments +for i in "$@" +do +case $i in + -d=*|--dir=*) + DIR="${i#*=}" + # ~/ paths have to be expanded manually + DIR="${DIR/#\~/$HOME}" + # If DIR is a symlink, get actual path: + if [[ -L $DIR ]] ; then + directory="$(readlink $DIR)" + else + directory="$DIR" + fi + shift # past argument=value + ;; + -n=*|--ns-host=*) + NIGHTSCOUT_HOST="${i#*=}" + shift # past argument=value + ;; + -s=*|--start-date=*) + START_DATE="${i#*=}" + START_DATE=`mydate --date="$START_DATE" +%Y-%m-%d` + shift # past argument=value + ;; + -e=*|--end-date=*) + END_DATE="${i#*=}" + END_DATE=`mydate --date="$END_DATE" +%Y-%m-%d` + shift # past argument=value + ;; + -t=*|--start-days-ago=*) + START_DAYS_AGO="${i#*=}" + shift # past argument=value + ;; + -d=*|--end-days-ago=*) + END_DAYS_AGO="${i#*=}" + shift # past argument=value + ;; + -p=*|--preferences=*) + PREF="${i#*=}" + # ~/ paths have to be expanded manually + PREF="${PREF/#\~/$HOME}" + # If PREF is a symlink, get actual path: + if [[ -L $PREF ]] ; then + preferences="$(readlink $PREF)" + else + preferences="$PREF" + fi + shift + ;; + -r=*|--profile=*) + PROF="${i#*=}" + # ~/ paths have to be expanded manually + PROF="${PROF/#\~/$HOME}" + # If PROF is a symlink, get 
actual path: + if [[ -L $PROF ]] ; then + profile="$(readlink $PROF)" + else + profile="$PROF" + fi + shift + ;; + -a=*|--autosens-override=*) + AS_OVER="${i#*=}" + # ~/ paths have to be expanded manually + AS_OVER="${AS_OVER/#\~/$HOME}" + # If AS_OVER is a symlink, get actual path: + if [[ -L $AS_OVER ]] ; then + as_override="$(readlink $AS_OVER)" + else + as_override="$AS_OVER" + fi + shift + ;; + *) + # unknown option + OPT=${i#*=} + # ~/ paths have to be expanded manually + OPT="${OPT/#\~/$HOME}" + # If OPT is a symlink, get actual path: + if [[ -L $OPT ]] ; then + autotunelog="$(readlink $OPT)" + else + autotunelog="$OPT" + fi + if ls $autotunelog; then + shift + else + echo "Option $OPT unknown" + UNKNOWN_OPTION="yes" + fi + ;; +esac +done + +# remove any trailing / from NIGHTSCOUT_HOST +NIGHTSCOUT_HOST=$(echo $NIGHTSCOUT_HOST | sed 's/\/$//g') + +if [[ -z "$NIGHTSCOUT_HOST" ]] && [[ -z "$autotunelog" ]]; then + # nightscout mode: download data from Nightscout + echo "Usage: NS mode: $0 [--dir=/tmp/oref0-simulator] --ns-host=https://mynightscout.herokuapp.com [--start-days-ago=number_of_days] [--end-days-ago=number_of_days] [--start-date=YYYY-MM-DD] [--end-date=YYYY-MM-DD] [--preferences=/path/to/preferences.json] [--autosens-override=/path/to/autosens-override.json]" + # file mode: for backtesting from autotune.*.log files specified on the command-line via glob, as an alternative to NS + echo "Usage: file mode: $0 [--dir=/tmp/oref0-simulator] /path/to/autotune*.log [--profile=/path/to/profile.json] [--preferences=/path/to/preferences.json] [--autosens-override=/path/to/autosens-override.json]" + exit 1 +fi +if [[ -z "$START_DATE" ]]; then + # Default start date of yesterday + START_DATE=`mydate --date="$START_DAYS_AGO days ago" +%Y-%m-%d` +fi +if [[ -z "$END_DATE" ]]; then + # Default end-date as this morning at midnight in order to not get partial day samples for now + # (ISF/CSF adjustments are still single values across each day) + END_DATE=`mydate --date="$END_DAYS_AGO days ago" +%Y-%m-%d` +fi + +if [[ -z "$UNKNOWN_OPTION" ]] ; then # everything is ok + if [[ -z "$NIGHTSCOUT_HOST" ]]; then + echo "Running oref0-backtest --dir=$DIR $autotunelog" | tee -a $DIR/commands.log + else + echo "Running oref0-backtest --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE" | tee -a $DIR/commands.log + fi +else + echo "Unknown options. Exiting" + exit 1 +fi + +oref0-simulator init $DIR +cd $DIR +mkdir -p autotune + +# nightscout mode: download data from Nightscout +if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then + # download profile.json from Nightscout profile.json endpoint, and also copy over to pumpprofile.json + ~/src/oref0/bin/get_profile.py --nightscout $NIGHTSCOUT_HOST display --format openaps 2>/dev/null > profile.json.new + ls -la profile.json.new + if jq -e .dia profile.json.new; then + jq -rs 'reduce .[] as $item ({}; . 
* $item)' profile.json profile.json.new | jq '.sens = .isfProfile.sensitivities[0].sensitivity' > profile.json.new.merged + ls -la profile.json.new.merged + if jq -e .dia profile.json.new.merged; then + mv profile.json.new.merged profile.json + else + echo Bad profile.json.new.merged + fi + else + echo Bad profile.json.new from get_profile.py + fi + + # download preferences.json from Nightscout devicestatus.json endpoint and overwrite profile.json with it + for i in $(seq 0 10); do + curl $NIGHTSCOUT_HOST/api/v1/devicestatus.json | jq .[$i].preferences > preferences.json.new + if jq -e .max_iob preferences.json.new; then + mv preferences.json.new preferences.json + jq -s '.[0] + .[1]' profile.json preferences.json > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged preferences.json into profile.json + break + else + echo Bad profile.json.new from preferences.json merge attempt $1 + fi + fi + done +fi + +# read a --profile file (overriding NS profile if it exists) +if [[ -e $profile ]]; then + jq -s '.[0] + .[1]' profile.json $profile > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged $profile into profile.json + else + echo Unable to merge $profile into profile.json + fi +fi + +# read a --preferences file to override the one from nightscout (for testing impact of different preferences) +if [[ -e $preferences ]]; then + jq -s '.[0] + .[1]' profile.json $preferences > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged $preferences into profile.json + else + echo Unable to merge $preferences into profile.json + fi +fi + +cp profile.json settings/ +cp profile.json pumpprofile.json +cp pumpprofile.json settings/ + +if [[ -e $as_override ]]; then + echo Overriding autosens with: + cat $as_override + cp $as_override autosens-override.json +fi + +if ! 
[[ -z "$NIGHTSCOUT_HOST" ]]; then + # download historical glucose data from Nightscout entries.json for the day leading up to $START_DATE at 4am + query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$START_DATE -24 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$START_DATE +4 hours" |nonl; echo 000)&count=1500" + echo Query: $NIGHTSCOUT_HOST entries/sgv.json $query + ns-get host $NIGHTSCOUT_HOST entries/sgv.json $query > ns-entries.json || die "Couldn't download ns-entries.json" + ls -la ns-entries.json || die "No ns-entries.json downloaded" + if jq -e .[0].sgv ns-entries.json; then + mv ns-entries.json glucose.json + cp glucose.json all-glucose.json + cat glucose.json | jq .[0].dateString > clock.json + fi + # download historical treatments data from Nightscout treatments.json for the day leading up to $START_DATE at 4am + query="find%5Bcreated_at%5D%5B%24gte%5D=`mydate --date="$START_DATE -24 hours" -Iminutes`&find%5Bcreated_at%5D%5B%24lte%5D=`mydate --date="$START_DATE +4 hours" -Iminutes`" + echo Query: $NIGHTSCOUT_HOST treatments.json $query + ns-get host $NIGHTSCOUT_HOST treatments.json $query > ns-treatments.json || die "Couldn't download ns-treatments.json" + ls -la ns-treatments.json || die "No ns-treatments.json downloaded" + if jq -e .[0].created_at ns-treatments.json; then + mv ns-treatments.json pumphistory.json + fi + + # download actual glucose data from Nightscout entries.json for the simulated time period + query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$START_DATE +4 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$END_DATE +28 hours" |nonl; echo 000)&count=9999999" + echo Query: $NIGHTSCOUT_HOST entries/sgv.json $query + ns-get host $NIGHTSCOUT_HOST entries/sgv.json $query > ns-entries.json || die "Couldn't download ns-entries.json" + ls -la ns-entries.json || die "No ns-entries.json downloaded" +fi + +# file mode: run simulator from deviations from an autotune log file +if ! [[ -z "$autotunelog" ]]; then + echo cat $autotunelog | tee -a $DIR/commands.log + cat $autotunelog | grep "dev: " | awk '{print $13 "," $20}' | while IFS=',' read dev carbs; do + ~/src/oref0/bin/oref0-simulator.sh $dev 0 $carbs $DIR + done + exit 0 +fi + +if ! 
[[ -z "$NIGHTSCOUT_HOST" ]]; then + echo oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | tee -a $DIR/commands.log + oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | grep "dev: " | awk '{print $13 "," $20}' | while IFS=',' read dev carbs; do + ~/src/oref0/bin/oref0-simulator.sh $dev 0 $carbs $DIR + done + exit 0 +fi + +echo Error: neither autotunelog nor NIGHTSCOUT_HOST set +exit 1 diff --git a/bin/oref0-detect-sensitivity.js b/bin/oref0-detect-sensitivity.js index 3146afeb9..f2a171ce4 100755 --- a/bin/oref0-detect-sensitivity.js +++ b/bin/oref0-detect-sensitivity.js @@ -77,10 +77,16 @@ if (!module.parent) { } } + // TODO: add support for a proper --retrospective flag if anything besides oref0-simulator needs this + var retrospective = false; var temptarget_data = { }; if (typeof temptarget_input !== 'undefined') { try { - temptarget_data = JSON.parse(fs.readFileSync(temptarget_input, 'utf8')); + if (temptarget_input == "retrospective") { + retrospective = true; + } else { + temptarget_data = JSON.parse(fs.readFileSync(temptarget_input, 'utf8')); + } } catch (e) { console.error("Warning: could not parse "+temptarget_input); } @@ -101,6 +107,7 @@ if (!module.parent) { , glucose_data: glucose_data , basalprofile: basalprofile , temptargets: temptarget_data + , retrospective: retrospective //, clock: clock_data }; console.error("Calculating sensitivity using 8h of non-exluded data"); diff --git a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh new file mode 100755 index 000000000..f945cc69b --- /dev/null +++ b/bin/oref0-simulator.sh @@ -0,0 +1,162 @@ +#!/bin/bash + +# usage: $0 + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +function init { + #DIR=/tmp/oref0-simulator + #if ! [[ -z "$1" ]]; then DIR=$1; fi + echo Initializing $DIR + mkdir -p $DIR || die "Couldn't mkdir -p $DIR" + cd $DIR || die "Couldn't cd $DIR" + rm *.json + cp -r ~/src/oref0/examples/* ./ + #for file in pumphistory profile clock autosens glucose basal_profile carbhistory temp_basal; do + #echo -n "${file}.json: " + #if ! file_is_recent_and_min_size ${file}.json || ! jq -C -c . 
${file}.json; then + #echo $PWD/${file}.json is too old, does not exist, or is invalid: copying from ~/src/oref0/examples/ + #cp ~/src/oref0/examples/${file}.json ./ + #fi + #done + pwd && ls -la + #echo + exit 0 +} + +function main { + + jq .isfProfile profile.json > isf.json + # only run autosens every "20m" + if [[ -e autosens-override.json ]]; then + cp autosens-override.json autosens.json + elif egrep T[0-2][0-9]:[024][0-4]: clock.json; then + oref0-detect-sensitivity glucose.json pumphistory.json isf.json basal_profile.json profile.json carbhistory.json retrospective > autosens.json + fi + oref0-calculate-iob pumphistory.json profile.json clock.json autosens.json > iob.json + # calculate naive IOB without autosens + oref0-calculate-iob pumphistory.json profile.json clock.json > naive_iob.json + #cat naive_iob.json | jq -c .[0] + oref0-meal pumphistory.json profile.json clock.json glucose.json basal_profile.json carbhistory.json > meal.json + # calculate naive BGI and deviation without autosens + oref0-determine-basal naive_iob.json temp_basal.json glucose.json profile.json --meal meal.json --microbolus --currentTime $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000) > naive_suggested.json + cat naive_suggested.json | jq -C -c '. | del(.predBGs) | del(.reason)' + oref0-determine-basal iob.json temp_basal.json glucose.json profile.json --auto-sens autosens.json --meal meal.json --microbolus --currentTime $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000) > suggested.json + jq . -c suggested.json >> log.json + cat suggested.json | jq -C -c '. | del(.predBGs) | del(.reason)' + cat suggested.json | jq -C -c .reason + #cat suggested.json | jq -C -c .predBGs + echo -n "ZT: " && jq -C -c .predBGs.ZT suggested.json + echo -n "IOB: " && jq -C -c .predBGs.IOB suggested.json + echo -n "UAM: " && jq -C -c .predBGs.UAM suggested.json + echo -n "COB: " && jq -C -c .predBGs.COB suggested.json + + if jq -e .units suggested.json > /dev/null; then + # if suggested.json delivers an SMB, put it into pumphistory.json + jq '. | [ { timestamp: .deliverAt, amount: .units, duration: 0, _type: "Bolus" } ]' suggested.json > newrecords.json + # truncate to 400 pumphistory records + # TODO: decide whether to save old pumphistory + jq -s '[.[][]] | .[0:400]' newrecords.json pumphistory.json > pumphistory.json.new + mv pumphistory.json.new pumphistory.json + fi + + if jq -e .duration suggested.json > /dev/null; then + # if suggested.json sets a new temp, put it into temp_basal.json and pumphistory.json + jq '. | { rate: .rate, duration: .duration, temp: "absolute" }' suggested.json > temp_basal.json + jq '. | [ { timestamp: .deliverAt, rate: .rate, temp: "absolute", _type: "TempBasal" } ]' suggested.json > newrecords.json + jq '. | [ { timestamp: .deliverAt, "duration (min)": .duration, _type: "TempBasalDuration" } ]' suggested.json >> newrecords.json + jq -s '[.[][]] | .[0:400]' newrecords.json pumphistory.json > pumphistory.json.new + mv pumphistory.json.new pumphistory.json + else + # otherwise, advance the clock 5m on the currently running temp + jq '. 
| .duration=.duration-5 | { rate: .rate, duration: .duration, temp: "absolute" }' temp_basal.json > temp_basal.json.new + mv temp_basal.json.new temp_basal.json + fi + #cat temp_basal.json | jq -c + + + if [ -z $deviation ]; then + # if deviation is unspecified, randomly decay the current deviation + deviation=".deviation / 6 * ($RANDOM/32767)" + echo -n "Deviation unspecified, using $deviation" + else + echo -n Using deviation of $deviation + fi + if [ -z $noise ]; then + # this adds a random +/- $noise mg/dL every run (the 0.5 is to work with |floor) + noise=3 + fi + noiseformula="2*$noise*$RANDOM/32767 - $noise + 0.5" + echo " and noise of +/- $noise ($noiseformula)" + if ( jq -e .bg naive_suggested.json && jq -e .BGI naive_suggested.json && jq -e .deviation naive_suggested.json ) >/dev/null; then + jq ".bg + .BGI + $deviation + $noiseformula |floor| [ { date: $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000), glucose: ., sgv: ., dateString: \"$(mydate -d $(cat clock.json | tr -d '"') -Iseconds )\", device: \"fakecgm\" } ] " naive_suggested.json > newrecord.json + else + if [[ $deviation == *".deviation"* ]]; then + adjustment=$noiseformula + else + adjustment="$deviation + $noiseformula" + fi + echo "Invalid suggested.json: updating glucose.json + $adjustment" + jq '.[0].glucose + '"$adjustment"' |floor| [ { date: '$(echo $(mydate -d $(cat clock.json | tr -d '"')+5minutes +%s)000)', glucose: ., sgv: ., dateString: "'$(mydate -d $(cat clock.json | tr -d '"') -Iseconds )'", device: "fakecgm" } ] ' glucose.json | tee newrecord.json + fi + if jq -e '.[0].glucose < 39' newrecord.json > /dev/null; then + echo "Glucose < 39 invalid" + echo '[ { "date": '$(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000)', "glucose": 39, "sgv": 39, "dateString": "'$(mydate -d $(cat clock.json | tr -d '"')+5minutes -Iseconds )'", "device": "fakecgm" } ] ' | tee newrecord.json + fi + # write a new glucose entry to glucose.json, and truncate it to 432 records (36 hours) + jq -s '[.[][]] | .[0:432]' newrecord.json glucose.json > glucose.json.new + mv glucose.json.new glucose.json + # keep all glucose records for easy stats + jq -s '[.[][]]' newrecord.json all-glucose.json > all-glucose.json.new + mv all-glucose.json.new all-glucose.json + + # if there are any new carbs, add them to carbhistory.json + addcarbs $carbs + + # advance the clock by 5m + if jq -e .deliverAt suggested.json >/dev/null; then + echo '"'$(mydate -d "$(cat suggested.json | jq .deliverAt | tr -d '"')+5 minutes" -Iseconds)'"' > clock.json + else + echo '"'$(mydate -d "$(cat clock.json | tr -d '"')+5minutes" -Iseconds)'"' > clock.json + fi +} + +function addcarbs { + # if a carbs argument is provided, write the carb entry to carbhistory.json + carbs=$1 + if ! 
[ -z "$carbs" ] && [ "$carbs" -gt 0 ]; then + echo '[ { "carbs": '$carbs', "insulin": null, "created_at": "'$(mydate -d $(cat clock.json | tr -d '"')+5minutes -Iseconds )'", "enteredBy": "oref0-simulator" } ] ' | tee newrecord.json + + # write the new record to carbhistory.json, and truncate it to 100 records + jq -s '[.[][]] | .[0:100]' newrecord.json carbhistory.json > carbhistory.json.new + mv carbhistory.json.new carbhistory.json + fi +} + +function stats { + echo Simulated: + cat all-glucose.json | jq '.[] | select (.device=="fakecgm") | .sgv' | awk -f ~/src/oref0/bin/glucose-stats.awk + #cat glucose.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk + echo Actual: + cat ns-entries.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk +} + +if [[ $1 == *"init"* ]]; then + DIR=/tmp/oref0-simulator + if ! [[ -z "$2" ]]; then DIR=$2; fi + init +else + DIR=/tmp/oref0-simulator + if ! [[ -z "$4" ]]; then DIR=$4; fi + cd $DIR && ls glucose.json >/dev/null || init + deviation=$1 + if [ -z "$1" ]; then deviation=0; fi + noise=$2 + if [ -z "$2" ]; then noise=10; fi + carbs=$3 + if [ -z "$3" ]; then carbs=0; fi + echo Running oref-simulator with deviation $deviation, noise $noise, and carbs $carbs in dir $DIR + main + stats +fi + diff --git a/examples/glucose.json b/examples/glucose.json index 9cadcbe71..af8dc8af5 100644 --- a/examples/glucose.json +++ b/examples/glucose.json @@ -1,7 +1,7 @@ [ { "date": 1527924300000, - "dateString": "2018-06-02T00:25:00-07:00", + "dateString": "2018-06-02T00:25:00-0700", "sgv": 101, "device": "fakecgm", "type": "sgv", @@ -9,7 +9,7 @@ }, { "date": 1527924000000, - "dateString": "2018-06-02T00:20:00-07:00", + "dateString": "2018-06-02T00:20:00-0700", "sgv": 102, "device": "fakecgm", "type": "sgv", @@ -17,7 +17,7 @@ }, { "date": 1527923700000, - "dateString": "2018-06-02T00:15:00-07:00", + "dateString": "2018-06-02T00:15:00-0700", "sgv": 105, "device": "fakecgm", "type": "sgv", @@ -25,7 +25,7 @@ }, { "date": 1527923400000, - "dateString": "2018-06-02T00:10:00-07:00", + "dateString": "2018-06-02T00:10:00-0700", "sgv": 105, "device": "fakecgm", "type": "sgv", @@ -33,7 +33,7 @@ }, { "date": 1527923100000, - "dateString": "2018-06-02T00:05:00-07:00", + "dateString": "2018-06-02T00:05:00-0700", "sgv": 102, "device": "fakecgm", "type": "sgv", @@ -41,7 +41,7 @@ }, { "date": 1527922800000, - "dateString": "2018-06-02T00:00:00-07:00", + "dateString": "2018-06-02T00:00:00-0700", "sgv": 100, "device": "fakecgm", "type": "sgv", diff --git a/examples/settings/pumpprofile.json b/examples/settings/pumpprofile.json new file mode 100644 index 000000000..a79a4119d --- /dev/null +++ b/examples/settings/pumpprofile.json @@ -0,0 +1,81 @@ +{ + "carb_ratios": { + "schedule": [ + { + "x": 0, + "i": 0, + "offset": 0, + "ratio": 10, + "r": 10, + "start": "00:00:00" + } + ], + "units": "grams" + }, + "carb_ratio": 10, + "isfProfile": { + "first": 1, + "sensitivities": [ + { + "endOffset": 1440, + "offset": 0, + "x": 0, + "sensitivity": 50, + "start": "00:00:00", + "i": 0 + } + ], + "user_preferred_units": "mg/dL", + "units": "mg/dL" + }, + "sens": 50, + "bg_targets": { + "first": 1, + "targets": [ + { + "max_bg": 100, + "min_bg": 100, + "x": 0, + "offset": 0, + "low": 100, + "start": "00:00:00", + "high": 100, + "i": 0 + } + ], + "user_preferred_units": "mg/dL", + "units": "mg/dL" + }, + "max_bg": 100, + "min_bg": 100, + "out_units": "mg/dL", + "max_basal": 4, + "min_5m_carbimpact": 8, + "maxCOB": 120, + "max_iob": 6, + "max_daily_safety_multiplier": 4, + 
"current_basal_safety_multiplier": 5, + "autosens_max": 2, + "autosens_min": 0.5, + "remainingCarbsCap": 90, + "enableUAM": true, + "enableSMB_with_bolus": true, + "enableSMB_with_COB": true, + "enableSMB_with_temptarget": false, + "enableSMB_after_carbs": true, + "maxSMBBasalMinutes": 75, + "curve": "rapid-acting", + "useCustomPeakTime": false, + "insulinPeakTime": 75, + "dia": 6, + "current_basal": 1.0, + "basalprofile": [ + { + "minutes": 0, + "rate": 1.0, + "start": "00:00:00", + "i": 0 + } + ], + "max_daily_basal": 1.0 +} diff --git a/lib/autotune-prep/categorize.js b/lib/autotune-prep/categorize.js index f3bbf081a..bd824e876 100644 --- a/lib/autotune-prep/categorize.js +++ b/lib/autotune-prep/categorize.js @@ -146,6 +146,7 @@ function categorizeBGDatums(opts) { } var BG; + var delta; var avgDelta; // TODO: re-implement interpolation to avoid issues here with gaps // calculate avgDelta as last 4 datapoints to better catch more rises after COB hits zero @@ -156,6 +157,7 @@ function categorizeBGDatums(opts) { //process.stderr.write("!"); continue; } + delta = (BG - bucketedData[i+1].glucose); avgDelta = (BG - bucketedData[i+4].glucose)/4; } else { console.error("Could not find glucose data"); } @@ -215,6 +217,7 @@ function categorizeBGDatums(opts) { glucoseDatum.BGI = BGI; // calculating deviation var deviation = avgDelta-BGI; + dev5m = delta-BGI; //console.error(deviation,avgDelta,BG,bucketedData[i].glucose); // set positive deviations to zero if BG is below 80 @@ -224,6 +227,7 @@ function categorizeBGDatums(opts) { // rounding and storing deviation deviation = deviation.toFixed(2); + dev5m = dev5m.toFixed(2); glucoseDatum.deviation = deviation; @@ -361,7 +365,8 @@ function categorizeBGDatums(opts) { // debug line to print out all the things var BGDateArray = BGDate.toString().split(" "); BGTime = BGDateArray[4]; - console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"basalBGI:",basalBGI.toFixed(1),"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",deviation,"avgDelta:",avgDelta,type); + // console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"basalBGI:",basalBGI.toFixed(1),"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",deviation,"avgDelta:",avgDelta,type); + console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",dev5m,"avgDev:",deviation,"avgDelta:",avgDelta,type,BG,myCarbs); } IOBInputs = { @@ -409,7 +414,7 @@ function categorizeBGDatums(opts) { console.error("and selecting the lowest 50%, leaving", basalGlucoseData.length, "basal+UAM ones"); } - if (2*ISFLength < UAMLength) { + if (2*ISFLength < UAMLength && ISFLength < 10) { console.error("Adding",UAMLength,"UAM deviations to",ISFLength,"ISF ones"); ISFGlucoseData = ISFGlucoseData.concat(UAMGlucoseData); // if too much data is excluded as UAM, add in the UAM deviations to ISF, but then discard the highest 50% diff --git a/lib/autotune/index.js b/lib/autotune/index.js index 9fa3ab56d..5b165c919 100644 --- a/lib/autotune/index.js +++ b/lib/autotune/index.js @@ -468,7 +468,7 @@ function tuneAllTheThings (inputs) { var p50ratios = Math.round( percentile(ratios, 0.50) * 1000)/1000; var fullNewISF = ISF; if (ISFGlucose.length < 10) { - // leave ISF unchanged if fewer than 5 ISF data points + // leave ISF unchanged if fewer than 10 ISF data points console.error ("Only found",ISFGlucose.length,"ISF data points, leaving ISF unchanged 
at",ISF); } else { // calculate what adjustments to ISF would have been necessary to bring median deviation to zero diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 34633f890..1d2ed6e98 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -146,10 +146,20 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ if (bg <= 10 || bg === 38 || noise >= 3) { //Dexcom is in ??? mode or calibrating, or xDrip reports high noise rT.reason = "CGM is calibrating, in ??? state, or noise is high"; } + var tooflat=false; + if (bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1) { + if (glucose_status.device == "fakecgm") { + console.error("CGM data is unchanged ("+bg+"+"+glucose_status.delta+") for 5m w/ "+glucose_status.short_avgdelta+" mg/dL ~15m change & "+glucose_status.long_avgdelta+" mg/dL ~45m change"); + console.error("Simulator mode detected (",glucose_status.device,"): continuing anyway"); + } else { + tooflat=true; + } + } + if (minAgo > 12 || minAgo < -5) { // Dexcom data is too old, or way in the future rT.reason = "If current system time "+systemTime+" is correct, then BG data is too old. The last BG data was read "+minAgo+"m ago at "+bgTime; // if BG is too old/noisy, or is changing less than 1 mg/dL/5m for 45m, cancel any high temps and shorten any long zero temps - } else if ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) { + } else if ( tooflat ) { if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { @@ -157,7 +167,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } // Then, for all such error conditions, cancel any running high temp or shorten any long zero temp, and return. - if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { + if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || tooflat ) { if (currenttemp.rate > basal) { // high temp is running rT.reason += ". 
Replacing high temp basal of "+currenttemp.rate+" with neutral temp of "+basal; rT.deliverAt = deliverAt; @@ -796,7 +806,12 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.COB=meal_data.mealCOB; rT.IOB=iob_data.iob; - rT.reason="COB: " + meal_data.mealCOB + ", Dev: " + convert_bg(deviation, profile) + ", BGI: " + convert_bg(bgi, profile) + ", ISF: " + convert_bg(sens, profile) + ", CR: " + round(profile.carb_ratio, 2) + ", Target: " + convert_bg(target_bg, profile) + ", minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); + rT.BGI=bgi; + rT.deviation=convert_bg(deviation, profile); + rT.ISF=convert_bg(sens, profile); + rT.CR=round(profile.carb_ratio, 2); + rT.target_bg=convert_bg(target_bg, profile); + rT.reason="minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); if (lastCOBpredBG > 0) { rT.reason += ", COBpredBG " + convert_bg(lastCOBpredBG, profile); } @@ -1072,7 +1087,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ maxBolus = round( profile.current_basal * profile.maxSMBBasalMinutes / 60 ,1); } // bolus 1/2 the insulinReq, up to maxBolus, rounding down to nearest bolus increment - var roundSMBTo = 1 / profile.bolus_increment; + bolusIncrement = 0.1; + if (profile.bolus_increment) { bolusIncrement=profile.bolus_increment }; + var roundSMBTo = 1 / bolusIncrement; var microBolus = Math.floor(Math.min(insulinReq/2,maxBolus)*roundSMBTo)/roundSMBTo; // calculate a long enough zero temp to eventually correct back up to target var smbTarget = target_bg; @@ -1080,7 +1097,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ durationReq = round(60*worstCaseInsulinReq / profile.current_basal); // if insulinReq > 0 but not enough for a microBolus, don't set an SMB zero temp - if (insulinReq > 0 && microBolus < profile.bolus_increment) { + if (insulinReq > 0 && microBolus < bolusIncrement) { durationReq = 0; } @@ -1115,6 +1132,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var nextBolusSeconds = round((SMBInterval - lastBolusAge) * 60, 0) % 60; //console.error(naive_eventualBG, insulinReq, worstCaseInsulinReq, durationReq); console.error("naive_eventualBG",naive_eventualBG+",",durationReq+"m "+smbLowTempReq+"U/h temp needed; last bolus",lastBolusAge+"m ago; maxBolus: "+maxBolus); + if (lastBolusAge > SMBInterval) { if (microBolus > 0) { rT.units = microBolus; diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index 8dddf7a73..9053c579c 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -82,6 +82,7 @@ var getLastGlucose = function (data) { , long_avgdelta: Math.round( long_avgdelta * 100 ) / 100 , date: now_date , last_cal: last_cal + , device: now.device }; }; diff --git a/package.json b/package.json index ce0f5080f..3db784b27 100644 --- a/package.json +++ b/package.json @@ -42,6 +42,7 @@ "oref0-autotune-export-to-xlsx": "./bin/oref0-autotune-export-to-xlsx.py", "oref0-autotune-prep": "./bin/oref0-autotune-prep.js", "oref0-autotune-recommends-report": "./bin/oref0-autotune-recommends-report.sh", + "oref0-backtest": "./bin/oref0-backtest.sh", "oref0-bash-common-functions.sh": "./bin/oref0-bash-common-functions.sh", "oref0-bluetoothup": "./bin/oref0-bluetoothup.sh", "oref0-calculate-iob": "./bin/oref0-calculate-iob.js", @@ -86,6 
+87,7 @@ "oref0-set-system-clock": "./bin/oref0-set-system-clock.sh", "oref0-set-local-temptarget": "./bin/oref0-set-local-temptarget.js", "oref0-setup": "./bin/oref0-setup.sh", + "oref0-simulator": "./bin/oref0-simulator.sh", "oref0-truncate-git-history": "./bin/oref0-truncate-git-history.sh", "oref0-upload-entries": "./bin/oref0-upload-entries.sh", "oref0-upload-profile": "./bin/oref0-upload-profile.js", From eaac5c0433830e78cef59e15853e2ce9415a0e8b Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 31 Dec 2019 14:26:24 -0800 Subject: [PATCH 23/66] ignore null glucose records (may fix #1265) (#1324) --- lib/glucose-get-last.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index 9053c579c..bca3bfbc0 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -8,7 +8,9 @@ var getLastGlucose = function (data) { }).map(function prepGlucose (obj) { //Support the NS sgv field to avoid having to convert in a custom way obj.glucose = obj.glucose || obj.sgv; - return obj; + if ( obj.glucose !== null ) { + return obj; + } }); var now = data[0]; From c94b4453291605c1e1d775560693d81cfb886aff Mon Sep 17 00:00:00 2001 From: tepidjuice Date: Wed, 1 Jan 2020 09:27:39 +1100 Subject: [PATCH 24/66] Warn users that the status screen is hard coded for the myopenaps directory (#1330) --- bin/oref0-setup.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 66bfd3114..1a28fac00 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -1116,6 +1116,7 @@ if prompt_yn "" N; then fi systemctl enable pi-buttons && systemctl restart pi-buttons echo "Installing openaps-menu..." + test "$directory" != "/$HOME/myopenaps" && (echo You are using a non-standard openaps directory. For the statusmenu to work correctly you need to set the openapsDir variable in index.js) cd $HOME/src && git clone git://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull) cd $HOME/src/openaps-menu && sudo npm install cp $HOME/src/openaps-menu/openaps-menu.service /etc/systemd/system/ && systemctl enable openaps-menu From 66482794b3c02c79174e055e6aa40e845b298844 Mon Sep 17 00:00:00 2001 From: Jens Heuschkel Date: Tue, 31 Dec 2019 23:28:13 +0100 Subject: [PATCH 25/66] Add public ip information to status file (#1331) * Add public ip information to status file I want to show the public ip in my system menu on the disply and don't want to do an extra ping :) This code is tested on my pi rig with explorer-HAT (for around two weeks). * update hasPublicIp to publicIP This file now contains the actual IP, so updating its name to better reflect that. 
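A consumer of /tmp/publicIP (for example a display or menu script) can then read the address directly instead of doing its own connectivity check; a minimal sketch, with only the file path taken from this patch and everything else illustrative:

    #!/bin/bash
    # Show the last public IP written by oref0-online, or report offline.
    # oref0-online removes /tmp/publicIP when no public IP is found, so an
    # empty or missing file means the rig is (or was last seen) offline.
    if [ -s /tmp/publicIP ]; then
        echo "Public IP: $(cat /tmp/publicIP)"
    else
        echo "Public IP: none (offline or not yet checked)"
    fi
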
Co-authored-by: Scott Leibrand --- bin/oref0-online.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/oref0-online.sh b/bin/oref0-online.sh index bf64a24d9..a0624bc06 100755 --- a/bin/oref0-online.sh +++ b/bin/oref0-online.sh @@ -138,11 +138,11 @@ function check_ip { PUBLIC_IP=$(curl --compressed -4 -s -m 15 checkip.amazonaws.com | awk -F '[, ]' '{print $NF}' | egrep "^[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]$") if [[ -z $PUBLIC_IP ]]; then echo not found - rm /tmp/hasPublicIp 2> /dev/null + rm /tmp/publicIP 2> /dev/null return 1 else echo $PUBLIC_IP - touch /tmp/hasPublicIp + echo $PUBLIC_IP > /tmp/publicIP fi } From 6a1a662678fe818edd5573845a0d6d0ffe8a60d9 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 31 Dec 2019 14:28:44 -0800 Subject: [PATCH 26/66] if disk has less than 10MB free, delete something and logrotate (#1337) * find and delete the oldest log file every minute to avoid full diskdelete the oldest log file * start deleting at 10MB free --- bin/oref0-cron-every-minute.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh index d2dcbcdf3..28f2916f6 100755 --- a/bin/oref0-cron-every-minute.sh +++ b/bin/oref0-cron-every-minute.sh @@ -128,6 +128,17 @@ if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log & fi +# if disk has less than 10MB free, delete something and logrotate +cd /var/log/openaps/ && df . | awk '($4 < 10000) {print $4}' | while read line; do + # find the oldest log file + ls -t | tail -1 +done | while read file; do + # delete the oldest log file + rm $file + # attempt a logrotate + logrotate /etc/logrotate.conf -f +done + # check if 5 minutes have passed, and if yes, turn of the screen to save power ttyport="$(get_pref_string .ttyport)" upSeconds="$(cat /proc/uptime | grep -o '^[0-9]\+')" From c16a841b06a0661b0b7be6c1e4d23691cb7ddaae Mon Sep 17 00:00:00 2001 From: Scott Date: Tue, 31 Dec 2019 14:38:41 -0800 Subject: [PATCH 27/66] whitespace cleanup --- bin/oref0-online.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/bin/oref0-online.sh b/bin/oref0-online.sh index a0624bc06..f711c0466 100755 --- a/bin/oref0-online.sh +++ b/bin/oref0-online.sh @@ -149,10 +149,10 @@ function check_ip { # network_name ip metric function ping_to_default_gw { ping $2 -c 1 > /dev/null - if [[ $? == 0 ]] ; then - echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' passed ; + if [[ $? == 0 ]] ; then + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' passed ; else - echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' failed ; + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' failed ; fi } @@ -167,7 +167,7 @@ function ping_default_gateways { # 192.168.3.0 0.0.0.0 255.255.255.0 U 302 0 0 wlan0 # 192.168.44.0 0.0.0.0 255.255.255.0 U 214 0 0 bnep0 route -n | grep ^0.0.0.0 |awk '{print $8 " " $2 " " $5}'| uniq | while read -r line ; do - ping_to_default_gw $line + ping_to_default_gw $line done } @@ -188,7 +188,7 @@ function bt_connect { else echo "oref0-bluetoothup already running" fi - + if ! test -f preferences.json \ || ! jq -e .bt_offline < preferences.json > /dev/null \ || ! 
ifconfig | egrep -q "bnep0" >/dev/null; then @@ -282,7 +282,7 @@ function stop_cycle { function bt_bnep0_cycle { echo -n "No IP address assigned, cycling the bnep0 interface" sudo ifdown bnep0; sudo ifup bnep0; - echo "...done" + echo "...done" } From 89a90b8e86525b3510710c22a70f698d184d1462 Mon Sep 17 00:00:00 2001 From: PieterGit <6500826+PieterGit@users.noreply.github.com> Date: Wed, 1 Jan 2020 19:15:29 +0100 Subject: [PATCH 28/66] Fix cgm with share2 via nightscout with tokenbased authentication with oref 0.7.x (regression) (#1341) * re-enable cgm inflow of share2 with token based authentication nightscout * syntax Co-authored-by: Scott Leibrand --- bin/oref0-get-ns-entries.js | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js index 08bb4b6d5..48f5379bf 100755 --- a/bin/oref0-get-ns-entries.js +++ b/bin/oref0-get-ns-entries.js @@ -64,7 +64,7 @@ if (!module.parent) { process.exit(1); } - if (apisecret.length != 40) { + if (apisecret != null && !apisecret.startsWith("token=") && apisecret.length != 40) { var shasum = crypto.createHash('sha1'); shasum.update(apisecret); apisecret = shasum.digest('hex'); @@ -155,15 +155,20 @@ if (!module.parent) { function loadFromNightscoutWithDate(lastDate, glucosedata) { - var headers = { - 'api-secret': apisecret - }; + // append the token secret to the end of the ns url, or add it to the headers if token based authentication is not used + var headers = {} ; + var tokenAuth = ""; + if (apisecret.startsWith("token=")) { + tokenAuth = "&" + apisecret; + } else { + headers = { 'api-secret': apisecret }; + } if (!_.isNil(lastDate)) { headers["If-Modified-Since"] = lastDate.toISOString(); } - var uri = nsurl + '/api/v1/entries/sgv.json?count=' + records; + var uri = nsurl + '/api/v1/entries/sgv.json?count=' + records + tokenAuth; var options = { uri: uri , json: true From 5b753ad285164133fc8f37979d6e48bed2a3cf12 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Sat, 4 Jan 2020 23:57:01 -0800 Subject: [PATCH 29/66] re-add openaps first-upload (mentioned in docs) (#1338) * replace openaps first-upload (mentioned in docs) * syntax * restore all but the mdt-cgm.json changes * try to reset clocks on enlite cgm failure --- bin/oref0-pump-loop.sh | 8 +++++++- lib/oref0-setup/mdt-cgm.json | 8 ++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 9abe96bc8..0d0771a0b 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -522,7 +522,13 @@ function if_mdt_get_bg { # helper function for if_mdt_get_bg function mdt_get_bg { - oref0-mdt-update 2>&1 | tee -a /var/log/openaps/cgm-loop.log >&3 + if oref0-mdt-update 2>&1 | tee -a /var/log/openaps/cgm-loop.log >&3; then + return 0 + else + # if Enlite data retrieval fails, run smb_reservoir_before function to see if time needs to be reset + smb_reservoir_before + return 1 + fi } # make sure we can talk to the pump and get a valid model number diff --git a/lib/oref0-setup/mdt-cgm.json b/lib/oref0-setup/mdt-cgm.json index 13e0e1eb2..df096c91a 100644 --- a/lib/oref0-setup/mdt-cgm.json +++ b/lib/oref0-setup/mdt-cgm.json @@ -101,5 +101,13 @@ "json_default": "True" }, "name": "nightscout/glucose.json" + }, + { + "//": "May be run directly by the user (mentioned in docs)", + "type": "alias", + "first-upload": { + "command": "! 
bash -c \"cat nightscout/glucose.json | json 1 > nightscout/recent-missing-entries.json && openaps report invoke nightscout/uploaded-entries.json\"" + }, + "name": "first-upload" } ] From 04daea220f4710c78665a197575985e61eeb5e3c Mon Sep 17 00:00:00 2001 From: Scott Date: Sun, 5 Jan 2020 20:41:07 -0800 Subject: [PATCH 30/66] wait for silence before running scripts that might use the radio like fakemeter --- bin/oref0-pump-loop.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 0d0771a0b..25d489018 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -103,6 +103,7 @@ main() { function run_script() { file=$1 + wait_for_silence $upto10s echo "Running plugin script ($file)... " timeout 60 $file echo "Completed plugin script ($file). " From 113a87123b764b5f2a4301003ff290bd5b27eac9 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Thu, 9 Jan 2020 05:00:48 +0200 Subject: [PATCH 31/66] Remove unused functions. (#1347) Tested by running for 24 hours with and without SMB. Signed-off-by: Tzachi Dar --- bin/oref0-pump-loop.sh | 71 ------------------------------------------ 1 file changed, 71 deletions(-) diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 25d489018..91cad2914 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -640,26 +640,6 @@ function invoke_reservoir_etc { check_battery 2>&3 >&4 || return 1 } -# Calculate new suggested temp basal and enact it -function enact { - rm enact/suggested.json - determine_basal && if (cat enact/suggested.json && grep -q duration enact/suggested.json); then ( - rm enact/enacted.json - ( mdt settempbasal enact/suggested.json && jq '. + {"received": true}' enact/suggested.json > enact/enacted.json ) 2>&3 >&4 - grep -q duration enact/enacted.json || ( mdt settempbasal enact/suggested.json && jq '. + {"received": true}' enact/suggested.json > enact/enacted.json ) ) 2>&1 | egrep -v "^ |subg_rfspy|handler" - fi - grep incorrectly enact/suggested.json && oref0-set-system-clock 2>&3 - echo -n "enact/enacted.json: " && cat enact/enacted.json | colorize_json -} - -# refresh pumphistory_24h if it's more than 5m old -function refresh_old_pumphistory { - (file_is_recent monitor/pumphistory-24h-zoned.json 5 100 \ - && echo -n "Pumphistory-24h less than 5m old. ") \ - || ( echo -n "Old pumphistory-24h, waiting for $upto30s seconds of silence: " && wait_for_silence $upto30s \ - && read_pumphistory ) -} - # refresh settings/profile if it's more than 1h old function refresh_old_profile { file_is_recent_and_min_size settings/profile.json 60 && echo -n "Profile less than 60m old; " \ @@ -713,57 +693,6 @@ function get_settings { fi } -function refresh_smb_temp_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - # only smb_enact_temp if we haven't successfully completed a pump_loop recently - # (no point in enacting a temp that's going to get changed after we see our last SMB) - if (jq '. | select(.duration > 20)' monitor/temp_basal.json | grep -q duration); then - echo -n "Temp duration >20m. " - elif ( find /tmp/ -mmin +10 | grep -q /tmp/pump_loop_completed ); then - echo "pump_loop_completed more than 10m ago: setting temp before refreshing pumphistory. " - smb_enact_temp - else - echo -n "pump_loop_completed less than 10m ago. 
" - fi -} - -function refresh_temp_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - # TODO: use pump_loop_completed logic as in refresh_smb_temp_and_enact - if ( (find monitor/ -newer monitor/temp_basal.json | grep -q glucose.json && echo -n "glucose.json newer than temp_basal.json. " ) \ - || (! file_is_recent_and_min_size monitor/temp_basal.json && echo "temp_basal.json more than 5m old. ")); then - echo -n Temp refresh - retry_fail invoke_temp_etc - echo ed - oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json || { echo "Couldn't calculate IOB"; fail "$@"; } - if (jq '. | select(.duration < 27)' monitor/temp_basal.json | grep -q duration); then - enact; else echo Temp duration 27m or more - fi - else - echo -n "temp_basal.json less than 5m old. " - fi -} - -function invoke_temp_etc { - check_clock 2>&3 >&4 || return 1 - check_tempbasal 2>&3 >&4 || return 1 - calculate_iob -} - -function refresh_pumphistory_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - if ((find monitor/ -newer monitor/pumphistory-24h-zoned.json | grep -q glucose.json && echo -n "glucose.json newer than pumphistory. ") \ - || (find enact/ -newer monitor/pumphistory-24h-zoned.json | grep -q enacted.json && echo -n "enacted.json newer than pumphistory. ") \ - || ((! file_is_recent monitor/pumphistory-zoned.json || ! find monitor/ -mmin +0 | grep -q pumphistory-zoned) && echo -n "pumphistory more than 5m old. ") ); then - { echo -n ": " && refresh_pumphistory_and_meal && enact; } - else - echo Pumphistory less than 5m old - fi -} - function refresh_profile { if [ -z $1 ]; then profileage=10 From 7d58ea9ad0081a9ee50405ce7a60dbd57e8fb732 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Tue, 21 Jan 2020 17:36:10 +0200 Subject: [PATCH 32/66] Add quotation marks around egrep expression. (#1351) --- bin/oref0-pump-loop.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 91cad2914..8af151386 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -765,7 +765,7 @@ function setglucosetimestamp { function check_reservoir() { set -o pipefail mdt reservoir 2>&3 | tee monitor/reservoir.json && nonl < monitor/reservoir.json \ - && egrep -q [0-9] monitor/reservoir.json + && egrep -q "[0-9]" monitor/reservoir.json } function check_model() { set -o pipefail From 90d5cebc511f576f180ffb1e0a85ddc165b555a5 Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Tue, 21 Jan 2020 22:12:41 -0600 Subject: [PATCH 33/66] add timeout to oref0-get-ns-entries request call (#1349) --- bin/oref0-get-ns-entries.js | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js index 48f5379bf..9bacfefe2 100755 --- a/bin/oref0-get-ns-entries.js +++ b/bin/oref0-get-ns-entries.js @@ -172,6 +172,7 @@ if (!module.parent) { var options = { uri: uri , json: true + , timeout: 90000 , headers: headers }; From 78cd0a7759a7f8cc2e74bd10c507f33b1126ce8d Mon Sep 17 00:00:00 2001 From: Jon Cluck Date: Sun, 2 Feb 2020 12:48:59 -0500 Subject: [PATCH 34/66] Make sure .bash_profile exists (#1354) Fixes an issue with new installs where log shortcuts are not added to .bash_profile, because the `oref0-log-shortcuts.sh` script only works on existing files. 
--- bin/oref0-setup.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 1a28fac00..71d9d4921 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -1051,6 +1051,8 @@ if prompt_yn "" N; then do_openaps_import $HOME/src/oref0/lib/oref0-setup/supermicrobolus.json echo "Adding OpenAPS log shortcuts" + # Make sure that .bash_profile exists first, then call script to add the log shortcuts + touch "$HOME/.bash_profile" oref0-log-shortcuts --add-to-profile="$HOME/.bash_profile" # Append NIGHTSCOUT_HOST and API_SECRET to $HOME/.bash_profile so that openaps commands can be executed from the command line From 3cddf42db663ca1383103f7bfeb3ec9780d766bd Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Sun, 2 Feb 2020 17:31:33 -0800 Subject: [PATCH 35/66] simulator support for BG target schedules (#1355) * rudimentary support for BG target schedules * also override .max_bg --- bin/oref0-backtest.sh | 6 ++++++ bin/oref0-simulator.sh | 15 +++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/bin/oref0-backtest.sh b/bin/oref0-backtest.sh index b32097008..86ee89c17 100755 --- a/bin/oref0-backtest.sh +++ b/bin/oref0-backtest.sh @@ -156,6 +156,7 @@ if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then # download profile.json from Nightscout profile.json endpoint, and also copy over to pumpprofile.json ~/src/oref0/bin/get_profile.py --nightscout $NIGHTSCOUT_HOST display --format openaps 2>/dev/null > profile.json.new ls -la profile.json.new + grep bg profile.json.new if jq -e .dia profile.json.new; then jq -rs 'reduce .[] as $item ({}; . * $item)' profile.json profile.json.new | jq '.sens = .isfProfile.sensitivities[0].sensitivity' > profile.json.new.merged ls -la profile.json.new.merged @@ -167,6 +168,7 @@ if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then else echo Bad profile.json.new from get_profile.py fi + grep bg profile.json # download preferences.json from Nightscout devicestatus.json endpoint and overwrite profile.json with it for i in $(seq 0 10); do @@ -198,10 +200,12 @@ fi # read a --preferences file to override the one from nightscout (for testing impact of different preferences) if [[ -e $preferences ]]; then + cat $preferences jq -s '.[0] + .[1]' profile.json $preferences > profile.json.new if jq -e .max_iob profile.json.new; then mv profile.json.new profile.json echo Successfully merged $preferences into profile.json + grep target_bg profile.json else echo Unable to merge $preferences into profile.json fi @@ -254,6 +258,8 @@ if ! [[ -z "$autotunelog" ]]; then fi if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then + # sleep for 10s to allow multiple parallel runs to start up before loading up the CPUs + sleep 10 echo oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | tee -a $DIR/commands.log oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | grep "dev: " | awk '{print $13 "," $20}' | while IFS=',' read dev carbs; do ~/src/oref0/bin/oref0-simulator.sh $dev 0 $carbs $DIR diff --git a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh index f945cc69b..a8707d34f 100755 --- a/bin/oref0-simulator.sh +++ b/bin/oref0-simulator.sh @@ -26,6 +26,21 @@ function init { function main { + # look up the currently active bg_target based on the current clock.json + target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}') + if ! 
[ -z "$target" ]; then + cat profile.json | jq ". | .min_bg=$target | .max_bg=$target" > profile.json.new + echo setting target to $target + #grep min_bg profile.json.new + #grep target_bg profile.json.new + if jq -e .dia profile.json.new >/dev/null; then + mv profile.json.new profile.json + cp profile.json settings/ + cp profile.json pumpprofile.json + cp pumpprofile.json settings/ + fi + fi + jq .isfProfile profile.json > isf.json # only run autosens every "20m" if [[ -e autosens-override.json ]]; then From 16b98402bb2a222dc875742635ef009c6d6af2dd Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Sun, 2 Feb 2020 17:45:12 -0800 Subject: [PATCH 36/66] simulator bugfix and better logging (#1356) * print pretty stats on one line, and csv stats on another * check if temp_basal.json is empty, and write a valid one if so * more accurate logging --- bin/glucose-stats.awk | 12 ++++++------ bin/oref0-simulator.sh | 3 +++ lib/determine-basal/determine-basal.js | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/bin/glucose-stats.awk b/bin/glucose-stats.awk index f5a8a90f6..8e78931b0 100644 --- a/bin/glucose-stats.awk +++ b/bin/glucose-stats.awk @@ -3,16 +3,16 @@ BEGIN { if (!min_bg) { min_bg=70 } if (!max_bg) { max_bg=180 } } -{ sum+=$1; count++ } +{ sum+=$1; count++; squares+=$1^2; } ($1 < 39) { next } ($1 < min) { min=$1 } ($1 > max) { max=$1 } ($1 <= max_bg && $1 >= min_bg) { inrange++ } ($1 > max_bg) { high++ } ($1 < min_bg) { low++ } -END { print "Count: " count; - printf "Min / Max / Average: %.0f / %.0f / %.1f\n", min, max, sum/count - printf "%%TIR / high / low (%.0f-%.0f): ", min_bg, max_bg - #print "%TIR / high / low (" min_bg "-" max_bg "): " \ - printf "%.1f%% / %.1f%% / %.1f%%\n", inrange/(high+inrange+low)*100, high/(high+inrange+low)*100, low/(high+inrange+low)*100 +END { # print "Count: " count; + printf "Count %.0f / Min %.0f / Max %.0f / Average %.1f / StdDev %.1f / ", count, min, max, sum/count, sqrt(squares/count-(sum/count)^2) + #printf "%%TIR / low / high (%.0f-%.0f): ", min_bg, max_bg + printf "%.1f%% TIR / %.1f%% low / %.1f%% high (%.0f-%.0f)\n", inrange/(high+inrange+low)*100, low/(high+inrange+low)*100, high/(high+inrange+low)*100, min_bg, max_bg + printf "%.0f,%.1f,%.1f,%.1f,%.1f", count, sum/count, low/(high+inrange+low)*100, high/(high+inrange+low)*100, sqrt(squares/count-(sum/count)^2) } diff --git a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh index a8707d34f..405dfffb9 100755 --- a/bin/oref0-simulator.sh +++ b/bin/oref0-simulator.sh @@ -87,6 +87,9 @@ function main { jq '. | .duration=.duration-5 | { rate: .rate, duration: .duration, temp: "absolute" }' temp_basal.json > temp_basal.json.new mv temp_basal.json.new temp_basal.json fi + if ! 
[ -s temp_basal.json ]; then + echo '{"rate": 0, "duration": 0, "temp": "absolute"}' > temp_basal.json + fi #cat temp_basal.json | jq -c diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 1d2ed6e98..dded5e57f 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -548,7 +548,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ cid = Math.min(remainingCATime*60/5/2,Math.max(0, meal_data.mealCOB * csf / ci )); } // duration (hours) = duration (5m) * 5 / 60 * 2 (to account for linear decay) - console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (~2h peak):",round(remainingCIpeak,1),"mg/dL per 5m"); + console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (",remainingCATime," peak):",round(remainingCIpeak,1),"mg/dL per 5m"); var minIOBPredBG = 999; var minCOBPredBG = 999; From 188487be282dda0d4f6d674548de6345c6a112cd Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Fri, 14 Feb 2020 04:22:38 +0200 Subject: [PATCH 37/66] Upload NS status when possible. (#1346) * Upload NS status when possible. Before this checkin, ns-loop created the status data, and uploaded it to nightscout (if possible). But in case of failure, the file will get overwritten. This means that in the case of no internet, OAPS decisions get lost. This checkin fixes it by adding timestamps to the files. When internet is available, the files will be uploaded according to the correct order. Files older than a day will be deleted. Testing: I have prevented uploading of files for a few hours, and when it was enabled again, data was shown correctly in nightscout site. Decreased timeout, and saw the files being deleted. Signed-off-by: Tzachi Dar * In the case that a status file does not contain iob delete it instead of uploading it. 
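In outline, the queue-and-flush approach added to oref0-ns-loop.sh works like this (a condensed sketch of the shell changes below; the die/colorize_json error handling is omitted):

    # queue: write each status snapshot to a timestamped file
    ns_status_file_name=ns-status$(date +"%Y-%m-%d-%T").json
    format_ns_status $ns_status_file_name
    # prune: drop queued files older than 24 hours (1440 minutes)
    find upload -maxdepth 1 -mmin +1440 -type f -name "ns-status*.json" -delete
    # flush: upload the queued files in timestamp order, deleting each one on success
    ls upload/ns-status*.json | while read -r file_name; do
        ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json $file_name && rm $file_name
    done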
Signed-off-by: Tzachi Dar --- bin/ns-status.js | 3 ++- bin/oref0-ns-loop.sh | 22 ++++++++++++++++++---- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/bin/ns-status.js b/bin/ns-status.js index 4b86c80a7..2b063cf7c 100755 --- a/bin/ns-status.js +++ b/bin/ns-status.js @@ -142,7 +142,8 @@ if (!module.parent) { battery: safeRequire(cwd + battery_input), reservoir: safeRequire(cwd + reservoir_input), status: requireWithTimestamp(cwd + status_input) - } + }, + created_at: new Date() }; if (mmtune_input) { diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index ce10823a1..3e5e45166 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -171,6 +171,7 @@ function upload { # grep -q iob monitor/iob.json && find enact/ -mmin -5 -size +5c | grep -q suggested.json && openaps format-ns-status && grep -q iob upload/ns-status.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json function upload_ns_status { + set -o pipefail #echo Uploading devicestatus grep -q iob monitor/iob.json || die "IOB not found" # set the timestamp on enact/suggested.json to match the deliverAt time @@ -180,17 +181,30 @@ function upload_ns_status { ls -la enact/suggested.json | awk '{print $6,$7,$8}' return 1 fi - format_ns_status && grep -q iob upload/ns-status.json || die "Couldn't generate ns-status.json" - ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS" + ns_status_file_name=ns-status$(date +"%Y-%m-%d-%T").json + format_ns_status $ns_status_file_name && grep -q iob upload/$ns_status_file_name || die "Couldn't generate ns-status.json" + # Delete files older than 24 hours. + find upload -maxdepth 1 -mmin +1440 -type f -name "ns-status*.json" -delete + # Upload the files one by one according to their order. + ls upload/ns-status*.json | while read -r file_name ; do + if ! 
grep -q iob $file_name ; then + #echo deleteing file $file_name + rm $file_name + continue + fi + ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json $file_name | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS" + rm $file_name + done } #ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json > upload/ns-status.json # ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --uploader monitor/edison-battery.json > upload/ns-status.json +# first parameter - ns_status file name function format_ns_status { if [ -s monitor/edison-battery.json ]; then - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json > upload/ns-status.json + ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json > upload/$1 else - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json > upload/ns-status.json + ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json > upload/$1 fi } From e7773010dc7312e919624bcb773ba081200d5bb2 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Thu, 13 Feb 2020 18:23:06 -0800 Subject: [PATCH 38/66] deprecate adv_target_adjustments; reorder code for readability (#1344) --- lib/determine-basal/determine-basal.js | 129 ++++++++++--------------- lib/profile/index.js | 2 - tests/command-behavior.tests.sh | 2 - 3 files changed, 49 insertions(+), 84 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index dded5e57f..fb3c9bebe 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -138,6 +138,20 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var bg = glucose_status.glucose; var noise = glucose_status.noise; +// Prep various delta variables. + var tick; + + if (glucose_status.delta > -0.5) { + tick = "+" + round(glucose_status.delta,0); + } else { + tick = round(glucose_status.delta,0); + } + //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); + var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta); + var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta); + var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); + + // Cancel high temps (and replace with neutral) or shorten long zero temps for various error conditions // 38 is an xDrip error state that usually indicates sensor failure @@ -270,38 +284,27 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } -// If iob_data or its required properties are missing, return. 
-// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above, -// and before attempting to use iob_data below. -// TODO: move this down to be just above // compare currenttemp to iob_data.lastTemp and cancel temp if they don't match - if (typeof iob_data === 'undefined' ) { - rT.error ='Error: iob_data undefined. '; - return rT; - } - - var iobArray = iob_data; - if (typeof(iob_data.length) && iob_data.length > 1) { - iob_data = iobArray[0]; - //console.error(JSON.stringify(iob_data[0])); - } - - if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) { - rT.error ='Error: iob_data missing some property. '; - return rT; +// Raise target for noisy / raw CGM data. + if (glucose_status.noise >= 2) { + // increase target at least 10% (default 30%) for raw / noisy data + var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier ); + // don't allow maxRaw above 250 + var maxRaw = Math.min( 250, profile.maxRaw ); + var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier )); + var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier )); + var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier )); + process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; "); + min_bg = adjustedMinBG; + target_bg = adjustedTargetBG; + max_bg = adjustedMaxBG; } -// Prep various delta variables. TODO: make this happen earlier along with other variable prep - var tick; + // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85 + var threshold = min_bg - 0.5*(min_bg-40); - if (glucose_status.delta > -0.5) { - tick = "+" + round(glucose_status.delta,0); - } else { - tick = round(glucose_status.delta,0); - } - //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); - var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta); - var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta); - var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); +// If iob_data or its required properties are missing, return. +// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before attempting to use iob_data below. // Adjust ISF based on sensitivityRatio var profile_sens = round(profile.sens,1) @@ -318,6 +321,22 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } console.error("; CR:",profile.carb_ratio); + if (typeof iob_data === 'undefined' ) { + rT.error ='Error: iob_data undefined. '; + return rT; + } + + var iobArray = iob_data; + if (typeof(iob_data.length) && iob_data.length > 1) { + iob_data = iobArray[0]; + //console.error(JSON.stringify(iob_data[0])); + } + + if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) { + rT.error ='Error: iob_data missing some property. '; + return rT; + } + // Compare currenttemp to iob_data.lastTemp and cancel temp if they don't match, as a safety check // This should occur after checking that we're not in one of the CGM-data-related error conditions handled above, // and before returning (doing nothing) below if eventualBG is undefined. 
@@ -371,61 +390,11 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // and adjust it for the deviation above var eventualBG = naive_eventualBG + deviation; -// Raise target for noisy / raw CGM data. -// TODO: move this up to immediately after parsing targets above (once adv_target_adjustments is deprecated) - if (glucose_status.noise >= 2) { - // increase target at least 10% (default 30%) for raw / noisy data - var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier ); - // don't allow maxRaw above 250 - var maxRaw = Math.min( 250, profile.maxRaw ); - var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier )); - var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier )); - var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier )); - process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; "); - min_bg = adjustedMinBG; - target_bg = adjustedTargetBG; - max_bg = adjustedMaxBG; - // adjust target BG range if configured to bring down high BG faster - // TODO: deprecate this - } else if ( bg > max_bg && profile.adv_target_adjustments && ! profile.temptargetSet ) { - // with target=100, as BG rises from 100 to 160, adjustedTarget drops from 100 to 80 - adjustedMinBG = round(Math.max(80, min_bg - (bg - min_bg)/3 ),0); - adjustedTargetBG =round( Math.max(80, target_bg - (bg - target_bg)/3 ),0); - adjustedMaxBG = round(Math.max(80, max_bg - (bg - max_bg)/3 ),0); - // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedMinBG, don’t use it - //console.error("naive_eventualBG:",naive_eventualBG+", eventualBG:",eventualBG); - if (eventualBG > adjustedMinBG && naive_eventualBG > adjustedMinBG && min_bg > adjustedMinBG) { - process.stderr.write("Adjusting targets for high BG: min_bg from "+min_bg+" to "+adjustedMinBG+"; "); - min_bg = adjustedMinBG; - } else { - process.stderr.write("min_bg unchanged: "+min_bg+"; "); - } - // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedTargetBG, don’t use it - if (eventualBG > adjustedTargetBG && naive_eventualBG > adjustedTargetBG && target_bg > adjustedTargetBG) { - process.stderr.write("target_bg from "+target_bg+" to "+adjustedTargetBG+"; "); - target_bg = adjustedTargetBG; - } else { - process.stderr.write("target_bg unchanged: "+target_bg+"; "); - } - // if eventualBG, naive_eventualBG, and max_bg aren't all above adjustedMaxBG, don’t use it - if (eventualBG > adjustedMaxBG && naive_eventualBG > adjustedMaxBG && max_bg > adjustedMaxBG) { - console.error("max_bg from "+max_bg+" to "+adjustedMaxBG); - max_bg = adjustedMaxBG; - } else { - console.error("max_bg unchanged: "+max_bg); - } - } - - // TODO: move this line to be 4 lines down - var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi); if (typeof eventualBG === 'undefined' || isNaN(eventualBG)) { rT.error ='Error: could not calculate eventualBG. 
'; return rT; } - - // TODO: move this up to immediately after calculating targets - // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85 - var threshold = min_bg - 0.5*(min_bg-40); + var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi); //console.error(reservoir_data); diff --git a/lib/profile/index.js b/lib/profile/index.js index 00ba074ca..9b6f5c322 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -18,7 +18,6 @@ function defaults ( ) { , low_temptarget_lowers_sensitivity: false // lower sensitivity for temptargets <= 99. , sensitivity_raises_target: true // raise BG target when autosens detects sensitivity , resistance_lowers_target: false // lower BG target when autosens detects resistance - , adv_target_adjustments: false // lower target automatically when BG and eventualBG are high , exercise_mode: false // when true, > 100 mg/dL high temp target adjusts sensitivityRatio for exercise_mode. This majorly changes the behavior of high temp targets from before. synonmym for high_temptarget_raises_sensitivity , half_basal_exercise_target: 160 // when temptarget is 160 mg/dL *and* exercise_mode=true, run 50% basal at this level (120 = 75%; 140 = 60%) // create maxCOB and default it to 120 because that's the most a typical body can absorb over 4 hours. @@ -81,7 +80,6 @@ function displayedDefaults () { profile.autosens_max = allDefaults.autosens_max; profile.autosens_min = allDefaults.autosens_min; profile.rewind_resets_autosens = allDefaults.rewind_resets_autosens; - profile.adv_target_adjustments = allDefaults.adv_target_adjustments; profile.exercise_mode = allDefaults.exercise_mode; profile.wide_bg_target_range = allDefaults.wide_bg_target_range; profile.sensitivity_raises_target = allDefaults.sensitivity_raises_target; diff --git a/tests/command-behavior.tests.sh b/tests/command-behavior.tests.sh index 16c620e38..277d509e3 100755 --- a/tests/command-behavior.tests.sh +++ b/tests/command-behavior.tests.sh @@ -474,7 +474,6 @@ EOT cat >profile.json <pumpprofile.json < Date: Thu, 13 Feb 2020 18:23:30 -0800 Subject: [PATCH 39/66] deprecate/remove support for max_bg and wide_bg_target_range (#1345) --- lib/profile/index.js | 4 ---- lib/profile/targets.js | 6 +----- tests/command-behavior.tests.sh | 6 ++---- tests/profile.test.js | 11 ----------- 4 files changed, 3 insertions(+), 24 deletions(-) diff --git a/lib/profile/index.js b/lib/profile/index.js index 9b6f5c322..dadb3897f 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -24,9 +24,6 @@ function defaults ( ) { // (If someone enters more carbs or stacks more; OpenAPS will just truncate dosing based on 120. 
// Essentially, this just limits AMA/SMB as a safety cap against excessive COB entry) , maxCOB: 120 - , wide_bg_target_range: false // by default use only the low end of the pump's BG target range as OpenAPS target - // by default the higher end of the target range is used only for avoiding bolus wizard overcorrections - // use wide_bg_target_range: true to force neutral temps over a wider range of eventualBGs , skip_neutral_temps: false // if true, don't set neutral temps , unsuspend_if_no_temp: false // if true, pump will un-suspend after a zero temp finishes , bolussnooze_dia_divisor: 2 // bolus snooze decays after 1/2 of DIA @@ -81,7 +78,6 @@ function displayedDefaults () { profile.autosens_min = allDefaults.autosens_min; profile.rewind_resets_autosens = allDefaults.rewind_resets_autosens; profile.exercise_mode = allDefaults.exercise_mode; - profile.wide_bg_target_range = allDefaults.wide_bg_target_range; profile.sensitivity_raises_target = allDefaults.sensitivity_raises_target; profile.unsuspend_if_no_temp = allDefaults.unsuspend_if_no_temp; profile.enableSMB_with_COB = allDefaults.enableSMB_with_COB; diff --git a/lib/profile/targets.js b/lib/profile/targets.js index fc91660ed..1eb171dd2 100644 --- a/lib/profile/targets.js +++ b/lib/profile/targets.js @@ -21,11 +21,7 @@ function lookup (inputs, profile) { } } - if (profile.wide_bg_target_range === true) { - console.error('Allowing wide eventualBG target range: ' + bgTargets.low + ' - ' + bgTargets.high ); - } else { - bgTargets.high = bgTargets.low; - } + bgTargets.high = bgTargets.low; var tempTargets = bgTargets; diff --git a/tests/command-behavior.tests.sh b/tests/command-behavior.tests.sh index 277d509e3..2741a64b0 100755 --- a/tests/command-behavior.tests.sh +++ b/tests/command-behavior.tests.sh @@ -569,8 +569,7 @@ EOT "sensitivity_raises_target": true, "suspend_zeros_iob": true, "unsuspend_if_no_temp": false, - "useCustomPeakTime": true, - "wide_bg_target_range": false + "useCustomPeakTime": true } EOT @@ -673,8 +672,7 @@ EOT "sensitivity_raises_target": true, "suspend_zeros_iob": true, "unsuspend_if_no_temp": false, - "useCustomPeakTime": true, - "wide_bg_target_range": false + "useCustomPeakTime": true } EOT diff --git a/tests/profile.test.js b/tests/profile.test.js index acdba8ae4..c0c572d64 100644 --- a/tests/profile.test.js +++ b/tests/profile.test.js @@ -43,17 +43,6 @@ describe('Profile', function ( ) { profile.carb_ratio.should.equal(20); }); - it('should should honour wide_bg_target_range', function () { - var profile = require('../lib/profile')(_.merge({}, baseInputs, {wide_bg_target_range: true})); - profile.max_iob.should.equal(0); - profile.dia.should.equal(3); - profile.sens.should.equal(100); - profile.current_basal.should.equal(1); - profile.max_bg.should.equal(100); - profile.min_bg.should.equal(100); - profile.carb_ratio.should.equal(20); - }); - var currentTime = new Date(); var creationDate = new Date(currentTime.getTime() - (5 * 60 * 1000)); From 38732f8ea4430466df30c6aae6ebc2db8d85799e Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Thu, 13 Feb 2020 19:22:16 -0800 Subject: [PATCH 40/66] target_bg preference (#1358) * deprecate/remove support for max_bg and wide_bg_target_range * support target_bg in preferences.json * populate target_bg from preferences.json to profile.json * allow target_bg to override profile schedule in simulator --- bin/oref0-simulator.sh | 6 +++++- lib/profile/index.js | 1 + lib/profile/targets.js | 4 ++++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git 
a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh index 405dfffb9..a5aa54020 100755 --- a/bin/oref0-simulator.sh +++ b/bin/oref0-simulator.sh @@ -27,7 +27,11 @@ function init { function main { # look up the currently active bg_target based on the current clock.json - target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}') + if grep target_bg profile.json; then + target=$(jq .target_bg profile.json) + else + target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}') + fi if ! [ -z "$target" ]; then cat profile.json | jq ". | .min_bg=$target | .max_bg=$target" > profile.json.new echo setting target to $target diff --git a/lib/profile/index.js b/lib/profile/index.js index dadb3897f..df5b8e6c1 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -64,6 +64,7 @@ function defaults ( ) { // TODO: make maxRaw a preference here usable by oref0-raw in myopenaps-cgm-loop //, maxRaw: 200 // highest raw/noisy CGM value considered safe to use for looping , calc_glucose_noise: false + , target_bg: false // set to an integer value in mg/dL to override pump min_bg }; } diff --git a/lib/profile/targets.js b/lib/profile/targets.js index 1eb171dd2..741b83151 100644 --- a/lib/profile/targets.js +++ b/lib/profile/targets.js @@ -21,6 +21,10 @@ function lookup (inputs, profile) { } } + if (profile.target_bg) { + bgTargets.low = profile.target_bg; + } + bgTargets.high = bgTargets.low; var tempTargets = bgTargets; From 236b82e67b0e2f82bb4e57d4af54b150a54db85b Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Thu, 13 Feb 2020 19:22:49 -0800 Subject: [PATCH 41/66] Only install local hostspot if selected (#1323) * don't loop on CGM data changing less than 1 mg/dL/5m for 45m * typofix * fix merge conflict error * update tests to not be too flat * continue to low-temp normally on too-flat readings <= 60 mg/dL * only install/setup hostapd/dnsmasq if desired (to avoid breaking wifi) * typofix * treat hotspot_option as a string that must be "true" * set up xdrip-js with DEXCOM_CGM_TX_ID * whitespace --- bin/oref0-setup.sh | 69 ++++++++++++++++++++++++++++------------------ 1 file changed, 42 insertions(+), 27 deletions(-) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 71d9d4921..f3823a178 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -114,6 +114,9 @@ case $i in ;; -npm=*|--npm_install=*) npm_option="${i#*=}" + ;; + --hotspot=*) + hotspot_option="${i#*=}" shift ;; *) @@ -471,6 +474,11 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo fi + if prompt_yn "Do you want to be able to set up a local-only wifi hotspot for offline monitoring?" N; then + HOTSPOT=true + else + HOTSPOT=false + fi if [[ ! -z $BT_PEB ]]; then prompt_and_validate BT_PEB "For Pancreabble enter Pebble mac id (i.e. AA:BB:CC:DD:EE:FF) hit enter to skip" validate_bt_peb @@ -626,6 +634,9 @@ fi if [[ ! -z "$radiotags" ]]; then echo -n " --radiotags='$radiotags'" | tee -a $OREF0_RUNAGAIN fi +if [[ ! 
-z "$hotspot_option" ]]; then + echo -n " --hotspot='$hotspot_option'" | tee -a $OREF0_RUNAGAIN +fi echo; echo | tee -a $OREF0_RUNAGAIN chmod 755 $OREF0_RUNAGAIN @@ -913,33 +924,37 @@ if prompt_yn "" N; then else echo bluez version ${bluetoothdversion} already installed fi - echo Installing prerequisites and configs for local-only hotspot - apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq" - test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak - cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf" - test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak - cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf" - sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd - cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap" - cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client" - if [ ! -z "$BT_MAC" ]; then - printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...' - # Make sure the bnep0 interface is in the /etc/networking/interface - (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n') - fi - #Stop automatic startup of hostapd & dnsmasq - update-rc.d -f hostapd remove - update-rc.d -f dnsmasq remove - # Edit /etc/hostapd/hostapd.conf for wifi using Hostname - sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf - # Add Commands to /etc/rc.local - # Interrupt Kernel Messages - if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then - sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local - fi - # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup - if ! grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then - sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local" + if [[ ${hotspot_option,,} =~ "true" ]]; then + echo Installing prerequisites and configs for local-only hotspot + apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq" + test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak + cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf" + test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak + cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf" + sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd + cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap" + cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client" + if [ ! -z "$BT_MAC" ]; then + printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...' 
+ # Make sure the bnep0 interface is in the /etc/networking/interface + (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n') + fi + #Stop automatic startup of hostapd & dnsmasq + update-rc.d -f hostapd remove + update-rc.d -f dnsmasq remove + # Edit /etc/hostapd/hostapd.conf for wifi using Hostname + sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf + # Add Commands to /etc/rc.local + # Interrupt Kernel Messages + if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then + sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local + fi + # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup + if ! grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then + sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local" + fi + else + echo Skipping local-only hotspot fi fi From c37b8c4ac1a51ce1a806b98bc8e407af8e001022 Mon Sep 17 00:00:00 2001 From: Jon Cluck Date: Tue, 25 Feb 2020 16:36:53 -0500 Subject: [PATCH 42/66] Installer fixes (#1366) * Bugfixes for 0.7.1 - Rework nodejs/npm install - Install openaps toolkit from github - Ask user to upgrade their Debian Jessie install... * Update openaps-install.sh * Update openaps-install.sh * Store MEDTRONIC_PUMP_ID and MEDTRONIC_FREQUENCY This stores MEDTRONIC_PUMP_ID and MEDTRONIC_FREQUENCY in the user's ~/.bash_profile, so they can run commands without having to manually set them at login time. --- bin/openaps-install.sh | 2 ++ bin/openaps-packages.sh | 31 ++++++++++++++++++------------- bin/oref0-setup.sh | 8 +++++++- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/bin/openaps-install.sh b/bin/openaps-install.sh index 9d1e5a63a..dfad4db51 100755 --- a/bin/openaps-install.sh +++ b/bin/openaps-install.sh @@ -42,6 +42,8 @@ if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /de echo "Acquire::Check-Valid-Until false;" | tee -a /etc/apt/apt.conf.d/10-nocheckvalid # Replace apt sources.list with archive.debian.org locations echo -e "deb http://security.debian.org/ jessie/updates main\n#deb-src http://security.debian.org/ jessie/updates main\n\ndeb http://archive.debian.org/debian/ jessie-backports main\n#deb-src http://archive.debian.org/debian/ jessie-backports main\n\ndeb http://archive.debian.org/debian/ jessie main contrib non-free\n#deb-src http://archive.debian.org/debian/ jessie main contrib non-free" > /etc/apt/sources.list + echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!" + echo "Jubilinux 0.2.0, based on Debian Jessie, is no longer receiving security or software updates!" fi #Workaround for Jubilinux to install nodejs/npm from nodesource diff --git a/bin/openaps-packages.sh b/bin/openaps-packages.sh index 47046e955..401b783e7 100755 --- a/bin/openaps-packages.sh +++ b/bin/openaps-packages.sh @@ -12,30 +12,35 @@ apt-get install -y sudo sudo apt-get update && sudo apt-get -y upgrade sudo apt-get install -y git python python-dev software-properties-common python-numpy python-pip watchdog strace tcpdump screen acpid vim locate lm-sensors || die "Couldn't install packages" -# We require jq >= 1.5 for --slurpfile for merging preferences. 
Debian Jessie ships with 1.4 +# We require jq >= 1.5 for --slurpfile for merging preferences. Debian Jessie ships with 1.4. if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /dev/null; then + echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!" sudo apt-get -y -t jessie-backports install jq || die "Couldn't install jq from jessie-backports" else + # Debian Stretch & Buster ship with jq >= 1.5, so install from apt sudo apt-get -y install jq || die "Couldn't install jq" fi -# install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed +# Install/upgrade to latest version of node (v10) using apt if neither node 8 nor node 10+ LTS are installed if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' &> /dev/null ; then - # nodesource doesn't support armv6 - if ! arch | grep -e 'armv6' &> /dev/null ; then - sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" - sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" - else - sudo apt-get install -y nodejs npm || die "Couldn't install nodejs and npm" - npm install npm@latest -g || die "Couldn't update npm" - fi - ## You may also need development tools to build native addons: - ##sudo apt-get install gcc g++ make + if getent passwd edison; then + # Only on the Edison, use nodesource setup script to add nodesource repository to sources.list.d, then install nodejs (npm is a part of the package) + curl -sL https://deb.nodesource.com/setup_8.x | bash - + sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" + else + sudo apt-get install -y nodejs npm || die "Couldn't install nodejs and npm" + fi + + # Upgrade npm to the latest version using its self-updater + sudo npm install npm@latest -g || die "Couldn't update npm" + + ## You may also need development tools to build native addons: + ## sudo apt-get install gcc g++ make fi # upgrade setuptools to avoid "'install_requires' must be a string" error sudo pip install setuptools -U # no need to die if this fails -sudo pip install -U openaps || die "Couldn't install openaps toolkit" +sudo pip install -U --default-timeout=1000 git+https://github.com/openaps/openaps.git || die "Couldn't install openaps toolkit" sudo pip install -U openaps-contrib || die "Couldn't install openaps-contrib" sudo openaps-install-udev-rules || die "Couldn't run openaps-install-udev-rules" sudo activate-global-python-argcomplete || die "Couldn't run activate-global-python-argcomplete" diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index f3823a178..b609ba61b 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -1089,12 +1089,16 @@ if prompt_yn "" N; then if [[ -f $HOME/.profile ]]; then sed --in-place '/.*API_SECRET.*/d' $HOME/.profile sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.profile + sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.profile + sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.profile fi # Delete old copies of variables before replacing them sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.bash_profile sed --in-place '/.*API_SECRET.*/d' $HOME/.bash_profile sed --in-place '/.*DEXCOM_CGM_RECV_ID*/d' $HOME/.bash_profile + sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.bash_profile + sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.bash_profile #sed --in-place '/.*DEXCOM_CGM_TX_ID*/d' $HOME/.bash_profile # Then append the variables @@ -1104,9 +1108,11 @@ if prompt_yn "" N; then echo "export API_SECRET" >> $HOME/.bash_profile echo 
DEXCOM_CGM_RECV_ID="$BLE_SERIAL" >> $HOME/.bash_profile echo "export DEXCOM_CGM_RECV_ID" >> $HOME/.bash_profile + echo MEDTRONIC_PUMP_ID="$serial" >> $HOME/.bash_profile + echo MEDTRONIC_FREQUENCY='`cat $HOME/myopenaps/monitor/medtronic_frequency.ini`' >> $HOME/.bash_profile + #echo DEXCOM_CGM_TX_ID="$DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile #echo "export DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile - echo #Turn on i2c, install pi-buttons, and openaps-menu for hardware that has a screen and buttons (so far, only Explorer HAT and Radiofruit Bonnet) if grep -qa "Explorer HAT" /proc/device-tree/hat/product &> /dev/null || [[ "$hardwaretype" =~ "explorer-hat" ]] || [[ "$hardwaretype" =~ "radiofruit" ]]; then From 504a478c42d0183540a097546f927ef05386b2c9 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 17 Mar 2020 14:58:49 -0700 Subject: [PATCH 43/66] shared-node (#1361) * Upload NS status when possible. Before this checkin, ns-loop created the status data, and uploaded it to nightscout (if possible). But in case of failure, the file will get overwritten. This means that in the case of no internet, OAPS decisions get lost. This checkin fixes it by adding timestamps to the files. When internet is available, the files will be uploaded according to the correct order. Files older than a day will be deleted. Testing: I have prevented uploading of files for a few hours, and when it was enabled again, data was shown correctly in nightscout site. Decreased timeout, and saw the files being deleted. Signed-off-by: Tzachi Dar * In the case that a status file does not contain iob delete it instead of uploading it. Signed-off-by: Tzachi Dar * Add code to run js code on a shared server. This saves initilaztion time. Worked for a day, which means testing only started. * Fix a typo. * Code to start shared node. * Add code that verifies that shared node is answering to requests and starting it if needed. * Add code to store program input in order to test it. * fix dashes to be underscores * Add json file to shared code. * Add code to capture data of json commands. * New json funciontality, and changes from tests. * Trival changes to white spaces handeling. * Minor refactoring to the code. Signed-off-by: Tzachi Dar * install socat on all platforms for shared-node * remove non-json debug output breaking json parsing * Merge branch 'tzachi-share-node-ns-status' of https://github.com/tzachi-dar/oref0 into tzachi-dar-tzachi-share-node-ns-status Conflicts: bin/oref0-ns-loop.sh * install socat on all platforms for shared-node * remove non-json debug output breaking json parsing * Remove temp files from the shared node. Signed-off-by: Tzachi Dar * Stop copying data to test_data. Signed-off-by: Tzachi Dar * Ad an update script and call it every 15 minutes. 
Co-authored-by: Tzachi Dar --- bin/mm-format-ns-treatments.sh | 3 +- bin/ns-status.js | 57 ++++++-- bin/oref0-bash-common-functions.sh | 39 +++++ bin/oref0-cron-every-15min.sh | 1 + bin/oref0-cron-every-minute.sh | 5 + bin/oref0-get-ns-entries.js | 3 +- bin/oref0-normalize-temps.js | 26 +++- bin/oref0-ns-loop.sh | 10 +- bin/oref0-setup.sh | 5 +- bin/oref0-shared-node-loop.sh | 21 +++ bin/oref0-shared-node.js | 226 +++++++++++++++++++++++++++++ bin/oref0-upgrade.sh | 20 +++ lib/require-utils.js | 56 ++++++- package.json | 1 + 14 files changed, 439 insertions(+), 34 deletions(-) create mode 100755 bin/oref0-shared-node-loop.sh create mode 100644 bin/oref0-shared-node.js create mode 100755 bin/oref0-upgrade.sh diff --git a/bin/mm-format-ns-treatments.sh b/bin/mm-format-ns-treatments.sh index 909474a61..8fac046c5 100755 --- a/bin/mm-format-ns-treatments.sh +++ b/bin/mm-format-ns-treatments.sh @@ -18,7 +18,8 @@ EOT # | json -e "this.type = 'mm://openaps/$self'" \ model=$(jq -r . $MODEL) -oref0-normalize-temps $HISTORY \ + +run_remote_command "oref0-normalize-temps $HISTORY" \ | jq '[ .[] | .medtronic = ( [ "mm://openaps/'$self'/", ( . | if ._type then ._type else .eventType end ) ] | join("") ) | .created_at = if .created_at then .created_at else .timestamp end diff --git a/bin/ns-status.js b/bin/ns-status.js index 2b063cf7c..6e67ace28 100755 --- a/bin/ns-status.js +++ b/bin/ns-status.js @@ -2,10 +2,11 @@ 'use strict'; var os = require("os"); +var fs = require('fs'); var requireUtils = require('../lib/require-utils'); -var safeRequire = requireUtils.safeRequire; var requireWithTimestamp = requireUtils.requireWithTimestamp; +var safeLoadFile = requireUtils.safeLoadFile; /* Prepare Status info to for upload to Nightscout @@ -23,7 +24,7 @@ var requireWithTimestamp = requireUtils.requireWithTimestamp; */ -function mmtuneStatus (status) { +function mmtuneStatus (status, cwd, mmtune_input) { var mmtune = requireWithTimestamp(cwd + mmtune_input); if (mmtune) { if (mmtune.scanDetails && mmtune.scanDetails.length) { @@ -35,7 +36,7 @@ function mmtuneStatus (status) { } } -function preferencesStatus (status) { +function preferencesStatus (status, cwd ,preferences_input) { var preferences = requireWithTimestamp(cwd + preferences_input); if (preferences) { status.preferences = preferences; @@ -47,8 +48,8 @@ function preferencesStatus (status) { } } -function uploaderStatus (status) { - var uploader = require(cwd + uploader_input); +function uploaderStatus (status, cwd, uploader_input) { + var uploader = JSON.parse(fs.readFileSync(cwd + uploader_input, 'utf8')); if (uploader) { if (typeof uploader === 'number') { status.uploader = { @@ -60,9 +61,12 @@ function uploaderStatus (status) { } } -if (!module.parent) { - var argv = require('yargs') + + +var ns_status = function ns_status(argv_params) { + + var argv = require('yargs')(argv_params) .usage("$0 [--uploader uploader.json] [mmtune.json] [--preferences preferences.json]") .option('preferences', { alias: 'p', @@ -77,10 +81,16 @@ if (!module.parent) { default: false }) .strict(true) + .fail(function (msg, err, yargs) { + if (err) { + return console.error('Error found', err); + } + return console.error('Parsing of command arguments failed', msg) + }) .help('help'); - var params = argv.argv; var inputs = params._; + var clock_input = inputs[0]; var iob_input = inputs[1]; var suggested_input = inputs[2]; @@ -94,9 +104,11 @@ if (!module.parent) { if (inputs.length < 7 || inputs.length > 8) { argv.showHelp(); - process.exit(1); + return; } + // TODO: For some 
reason the following line does not work (../package.json ia not found). + //var pjson = JSON.parse(fs.readFileSync('../package.json', 'utf8')); var pjson = require('../package.json'); var cwd = process.cwd() + '/'; @@ -138,28 +150,41 @@ if (!module.parent) { version: pjson.version }, pump: { - clock: safeRequire(cwd + clock_input), - battery: safeRequire(cwd + battery_input), - reservoir: safeRequire(cwd + reservoir_input), + clock: safeLoadFile(cwd + clock_input), + battery: safeLoadFile(cwd + battery_input), + reservoir: safeLoadFile(cwd + reservoir_input), status: requireWithTimestamp(cwd + status_input) }, created_at: new Date() }; if (mmtune_input) { - mmtuneStatus(status); + mmtuneStatus(status, cwd, mmtune_input); } if (preferences_input) { - preferencesStatus(status); + preferencesStatus(status, cwd ,preferences_input); } if (uploader_input) { - uploaderStatus(status); + uploaderStatus(status, cwd, uploader_input); } - console.log(JSON.stringify(status)); + return JSON.stringify(status); } catch (e) { return console.error("Could not parse input data: ", e); } } + +if (!module.parent) { + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + var result = ns_status(command); + if(result !== undefined) { + console.log(result); + } +} + +exports = module.exports = ns_status diff --git a/bin/oref0-bash-common-functions.sh b/bin/oref0-bash-common-functions.sh index b68ae2b4f..f4262d43f 100755 --- a/bin/oref0-bash-common-functions.sh +++ b/bin/oref0-bash-common-functions.sh @@ -8,6 +8,45 @@ self=$(basename $0) PREFERENCES_FILE="preferences.json" +function run_remote_command () { + set -o pipefail + out_file=$( mktemp /tmp/shared_node.XXXXXXXXXXXX) + #echo $out_file + echo -n $1 |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node > $out_file || return 1 + #cat $out_file + jq -j .err $out_file >&2 + jq -j .stdout $out_file + return_val=$( jq -r .return_val $out_file) + rm $out_file + return $(( return_val )) +} + +function start_share_node_if_needed() { + # First check if node is alive + output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)" + echo $output + if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then + echo shared node is alive + return 0 + fi + echo 'killing node so it will restart later' + node_pid="$(ps -ef | grep node | grep oref0-shared-node.js | grep -v grep | awk '{print $2 }')" + echo $node_pid + kill -9 $node_pid + # Node should start automaticly by oref0-shared-node-loop + # Waiting 90 seconds for it to start + for i in {1..90} + do + sleep 1 + output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)" + echo $output + if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then + echo shared node is alive + return 0 + fi + done + die Waiting for shared node failed +} function overtemp { # check for CPU temperature above 85°C diff --git a/bin/oref0-cron-every-15min.sh b/bin/oref0-cron-every-15min.sh index ea79a3405..00a56e113 100755 --- a/bin/oref0-cron-every-15min.sh +++ b/bin/oref0-cron-every-15min.sh @@ -28,3 +28,4 @@ fi ) & oref0-version --check-for-updates > /tmp/oref0-updates.txt & +/root/src/oref0/bin/oref0-upgrade.sh diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh index 28f2916f6..2874b2be4 100755 --- a/bin/oref0-cron-every-minute.sh +++ b/bin/oref0-cron-every-minute.sh @@ -112,6 +112,10 @@ if ! 
is_bash_process_running_named oref0-pump-loop; then oref0-pump-loop 2>&1 | tee -a /var/log/openaps/pump-loop.log | adddate openaps.pump-loop | uncolor |tee -a /var/log/openaps/openaps-date.log & fi +if ! is_bash_process_running_named oref0-shared-node-loop; then + oref0-shared-node-loop 2>&1 | tee -a /var/log/openaps/shared-node.log | adddate openaps.shared-node | uncolor |tee -a /var/log/openaps/openaps-date.log & +fi + if [[ ! -z "$BT_PEB" ]]; then if ! is_process_running_named "peb-urchin-status $BT_PEB"; then peb-urchin-status $BT_PEB 2>&1 | tee -a /var/log/openaps/urchin-loop.log | adddate openaps.urchin-loop | uncolor |tee -a /var/log/openaps/openaps-date.log & @@ -138,6 +142,7 @@ done | while read file; do # attempt a logrotate logrotate /etc/logrotate.conf -f done +start_share_node_if_needed # check if 5 minutes have passed, and if yes, turn of the screen to save power ttyport="$(get_pref_string .ttyport)" diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js index 9bacfefe2..b0dcd36d9 100755 --- a/bin/oref0-get-ns-entries.js +++ b/bin/oref0-get-ns-entries.js @@ -49,7 +49,6 @@ if (!module.parent) { } var nsurl = params._.slice(1, 2).pop(); - if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1); // remove trailing slash if it exists var apisecret = params._.slice(2, 3).pop(); var hours = Number(params._.slice(3, 4).pop()); @@ -63,6 +62,8 @@ if (!module.parent) { usage(); process.exit(1); } + // remove trailing slash if it exists + if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1); if (apisecret != null && !apisecret.startsWith("token=") && apisecret.length != 40) { var shasum = crypto.createHash('sha1'); diff --git a/bin/oref0-normalize-temps.js b/bin/oref0-normalize-temps.js index 3320713b3..91fa49a52 100755 --- a/bin/oref0-normalize-temps.js +++ b/bin/oref0-normalize-temps.js @@ -17,9 +17,12 @@ var find_insulin = require('../lib/temps'); var find_bolus = require('../lib/bolus'); var describe_pump = require('../lib/pump'); +var fs = require('fs'); -if (!module.parent) { - var argv = require('yargs') + + +var oref0_normalize_temps = function oref0_normalize_temps(argv_params) { + var argv = require('yargs')(argv_params) .usage('$0 ') .demand(1) // error and show help if some other args given @@ -31,13 +34,12 @@ if (!module.parent) { if (params._.length > 1) { argv.showHelp(); - console.error('Too many arguments'); - process.exit(1); + return console.error('Too many arguments'); } var cwd = process.cwd() try { - var all_data = require(cwd + '/' + iob_input); + var all_data = JSON.parse(fs.readFileSync(cwd + '/' + iob_input)); } catch (e) { return console.error("Could not parse pumphistory: ", e); } @@ -50,6 +52,18 @@ if (!module.parent) { // treatments.sort(function (a, b) { return a.date > b.date }); - console.log(JSON.stringify(treatments)); + return JSON.stringify(treatments); +} + +if (!module.parent) { + // remove the first parameter. 
+ var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_normalize_temps(command) + if(result !== undefined) { + console.log(result); + } } +exports = module.exports = oref0_normalize_temps diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index 3e5e45166..216bc2048 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -92,8 +92,7 @@ function glucose_fresh { } function find_valid_ns_glucose { - # TODO: use jq for this if possible - cat cgm/ns-glucose.json | json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38" + run_remote_command 'json -f cgm/ns-glucose.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38"' } function ns_temptargets { @@ -202,9 +201,9 @@ function upload_ns_status { # first parameter - ns_status file name function format_ns_status { if [ -s monitor/edison-battery.json ]; then - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json > upload/$1 + run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json' > upload/$1 else - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json > upload/$1 + run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json' > upload/$1 fi } @@ -212,7 +211,8 @@ function format_ns_status { function upload_recent_treatments { #echo Uploading treatments format_latest_nightscout_treatments || die "Couldn't format latest NS treatments" - if test $(json -f upload/latest-treatments.json -a created_at eventType | wc -l ) -gt 0; then + + if test $(jq -r '.[] |.created_at + " " + .eventType' upload/latest-treatments.json | wc -l ) -gt 0; then ns-upload $NIGHTSCOUT_HOST $API_SECRET treatments.json upload/latest-treatments.json | colorize_json || die "Couldn't upload latest treatments to NS" else echo "No new treatments to upload" diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index b609ba61b..d2298967e 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -1062,6 +1062,9 @@ if prompt_yn "" N; then #Moved this out of the conditional, so that x12 models will work with smb loops sudo apt-get -y install bc ntpdate bash-completion || die "Couldn't install bc etc." + # now required on all platforms for shared-node + echo "Installing socat and ntp..." + apt-get install -y socat ntp cd $directory || die "Can't cd $directory" do_openaps_import $HOME/src/oref0/lib/oref0-setup/supermicrobolus.json @@ -1125,8 +1128,6 @@ if prompt_yn "" N; then sed -i.bak -e "s/#dtparam=i2c_arm=on/dtparam=i2c_arm=on/" /boot/config.txt egrep "^dtparam=i2c1=on" /boot/config.txt || echo "dtparam=i2c1=on,i2c1_baudrate=400000" >> /boot/config.txt echo "i2c-dev" > /etc/modules-load.d/i2c.conf - echo "Installing socat and ntp..." - apt-get install -y socat ntp echo "Installing pi-buttons..." 
systemctl stop pi-buttons cd $HOME/src && git clone git://github.com/bnielsen1965/pi-buttons.git diff --git a/bin/oref0-shared-node-loop.sh b/bin/oref0-shared-node-loop.sh new file mode 100755 index 000000000..66f60513f --- /dev/null +++ b/bin/oref0-shared-node-loop.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +# Shared node loop. +main() { + echo + echo Starting Shared-Node-loop at $(date): + while true; do + + node ../src/oref0/bin/oref0-shared-node.js + echo Tough luck, shared node crashed. Starting it againg at $(date) + done +} + +usage "$@" < { + console.log('now listening'); + }); + + unixSocketServer.on('end', function() { + console.log("server 2 disconnected from port"); + }); + + unixSocketServer.on('connection', (s) => { + console.log('got connection!'); + s.allowHalfOpen = true; + s.on('end', function() { + console.log("server 2 disconnected from port"); + }); + + s.on('error', function(err) { + console.log("there was an error in the client and the error is: " + err.code); + }); + + s.on("data", function(data) { + //... do stuff with the data ... + console.log('read data', data.toString()); + var command = data.toString().split(' '); + + // Split by space except for inside quotes + // (https://stackoverflow.com/questions/16261635/javascript-split-string-by-space-but-ignore-space-in-quotes-notice-not-to-spli) + var command = data.toString().match(/\\?.|^$/g).reduce((p, c) => { + if (c === '"') { + p.quote ^= 1; + } else if (!p.quote && c === ' ') { + p.a.push(''); + } else { + p.a[p.a.length - 1] += c.replace(/\\(.)/, "$1"); + } + return p; + }, { + a: [''] + }).a; + + command = command.map(s => s.trim()); + + var result = 'unknown command'; + var return_val = 0; + + console.log('command = ', command); + + if (command[0] == 'ns-status') { + // remove the first parameter. + command.shift(); + try { + result = ns_status(command); + result = addNewlToResult(result); + } catch (err) { + return_val = 1; + console.log('exception when parsing ns_status ', err); + } + } else if (command[0] == 'oref0-normalize-temps') { + command.shift(); + try { + result = oref0_normalize_temps(command); + result = addNewlToResult(result); + } catch (err) { + return_val = 1; + console.log('exception when parsing oref0-normalize-temps ', err); + } + } else if (command[0] == 'ping') { + result = 'pong'; + } else if (command[0] == 'json') { + // remove the first parameter. + command.shift(); + try { + [result, return_val] = jsonWrapper(command); + result = addNewlToResult(result); + } catch (err) { + return_val = 1; + console.log('exception when running json_wrarpper ', err); + } + } else { + console.error('Unknown command = ', command); + return_val = 1; + } + s.write(JSON.stringify(createRetVal(result, return_val))); + s.end(); + }); + }); +} + +/** + * Return a function for the given JS code that returns. + * + * If no 'return' in the given javascript snippet, then assume we are a single + * statement and wrap in 'return (...)'. This is for convenience for short + * '-c ...' snippets. 
+ */ +function funcWithReturnFromSnippet(js) { + // auto-"return" + if (js.indexOf('return') === -1) { + if (js.substring(js.length - 1) === ';') { + js = js.substring(0, js.length - 1); + } + js = 'return (' + js + ')'; + } + return (new Function(js)); +} + + +function addNewlToResult(result) { + if (result === undefined) { + // This preserves the oref0_normalize_temps behavior. + result = "" + } else { + result += "\n"; + } + return result; +} + +// The goal is to run something like: +// json -f monitor/status.1.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38" +function jsonWrapper(argv_params) { + var argv = require('yargs')(argv_params) + .usage('$0 json -f monitor/status.1.json -c \"minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38\"') + .option('input_file', { + alias: 'f', + nargs: 1, + describe: "Input/Output file", + default: false + }) + .option('filtering_code', { + alias: 'c', + nargs: 1, + describe: "Conditional filtering", + default: false + }) + .strict(true) + .fail(function(msg, err, yargs) { + if (err) { + return [console.error('Error found', err), 1]; + } + return [console.error('Parsing of command arguments failed', msg), 1]; + }) + .help('help'); + var params = argv.argv; + var inputs = params._; + if (inputs.length > 0) { + return [console.error('Error: too many input parameters.'), 1]; + } + if (!params.input_file) { + return [console.error('Error: No input file.'), 1]; + } + if (!params.filtering_code) { + return [console.error('Error: No filtering_code'), 1]; + } + + var data = requireUtils.safeLoadFile(params.input_file); + if (!data) { + // file is empty. For this files json returns nothing + console.error('Error: No data loaded') + return ["", 1]; + } + if (!Array.isArray(data)) { + // file is not an array of json, we do not handle this. + console.error('Error: data is not an array.') + return ["", 1]; + } + + var condFuncs = funcWithReturnFromSnippet(params.filtering_code); + var filtered = []; + for (var i = 0; i < data.length; i++) { + if (condFuncs.call(data[i])) { + filtered.push(data[i]); + } + } + return [JSON.stringify(filtered, null, 2), 0]; +} + + +if (!module.parent) { + serverListen(); +} + +// Functions needed to simulate a stack node. +const util = require('util'); +const vm = require('vm'); + +function sleepFor(sleepDuration) { + var now = new Date().getTime(); + while (new Date().getTime() < now + sleepDuration) { + /* do nothing */ } +} diff --git a/bin/oref0-upgrade.sh b/bin/oref0-upgrade.sh new file mode 100755 index 000000000..7134951f5 --- /dev/null +++ b/bin/oref0-upgrade.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. 
Is oref0 correctly installed?"; exit 1) + +usage "$@" < Date: Wed, 13 May 2020 19:03:40 +0300 Subject: [PATCH 44/66] Fix pancreabble config paths (#1372) --- lib/oref0-setup/pancreabble.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/oref0-setup/pancreabble.json b/lib/oref0-setup/pancreabble.json index f2828998d..2803d98b6 100644 --- a/lib/oref0-setup/pancreabble.json +++ b/lib/oref0-setup/pancreabble.json @@ -24,10 +24,10 @@ "upload/urchin-data.json": { "use": "format_urchin_data", "reporter": "JSON", - "cgm_clock": "monitor/clock.json", + "cgm_clock": "monitor/clock-zoned.json", "action": "add", "device": "pbbl", - "glucose_history": "monitor/glucose-unzoned.json", + "glucose_history": "monitor/glucose.json", "status_text": "", "status_json": "upload/urchin-status.json" } From 5313363b5d564c274ba928b1ec7d32b0b64343b9 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 23 Jun 2020 10:17:46 -0700 Subject: [PATCH 45/66] calculate effective temp basal rate for Omnipod Loop (#1364) --- lib/iob/history.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/iob/history.js b/lib/iob/history.js index 860b5f83e..1a18e27df 100644 --- a/lib/iob/history.js +++ b/lib/iob/history.js @@ -326,6 +326,11 @@ function calcTempTreatments (inputs, zeroTempDuration) { var temp = {}; temp.rate = current.rate; temp.duration = current.duration; + // Loop reports the amount of insulin actually delivered while the temp basal was running + // use that to calculate the effective temp basal rate + if (typeof current.amount !== 'undefined') { + temp.rate = current.amount / current.duration * 60; + } temp.timestamp = current.timestamp; temp.started_at = new Date(tz(temp.timestamp)); temp.date = temp.started_at.getTime(); From b51cfe2c3c2924698b0f6592d206cc9474292162 Mon Sep 17 00:00:00 2001 From: Foxy7 <37312469+Foxy7@users.noreply.github.com> Date: Sun, 30 Aug 2020 20:45:03 +0100 Subject: [PATCH 46/66] maxDelta bg % threshold as hidden preference (#1382) * Update index.js * Update determine-basal.js * Added 0.2 as default and 0.3 as safety limit * fixed code spacing * removed % --- lib/determine-basal/determine-basal.js | 14 +++++++++++--- lib/profile/index.js | 1 + 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index fb3c9bebe..ce37b04b2 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -837,9 +837,17 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ enableSMB = false; } // Disable SMB for sudden rises (often caused by calibrations or activation/deactivation of Dexcom's noise-filtering algorithm) - if ( maxDelta > 0.20 * bg ) { - console.error("maxDelta",convert_bg(maxDelta, profile),"> 20% of BG",convert_bg(bg, profile),"- disabling SMB"); - rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > 20% of BG "+convert_bg(bg, profile)+": SMB disabled; "; +// Added maxDelta_bg_threshold as a hidden preference and included a cap at 0.3 as a safety limit +var maxDelta_bg_threshold; + if (typeof profile.maxDelta_bg_threshold === 'undefined') { + maxDelta_bg_threshold = 0.2; + } + if (typeof profile.maxDelta_bg_threshold !== 'undefined') { + maxDelta_bg_threshold = Math.min(profile.maxDelta_bg_threshold, 0.3); + } + if ( maxDelta > maxDelta_bg_threshold * bg ) { + console.error("maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+" - disabling 
SMB"); + rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+": SMB disabled; "; enableSMB = false; } diff --git a/lib/profile/index.js b/lib/profile/index.js index df5b8e6c1..2b3cf5d84 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -51,6 +51,7 @@ function defaults ( ) { , maxUAMSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB when IOB exceeds COB , SMBInterval: 3 // minimum interval between SMBs, in minutes. , bolus_increment: 0.1 // minimum bolus that can be delivered as an SMB + , maxDelta_bg_threshold: 0.2 // maximum change in bg to use SMB, above that will disable SMB , curve: "rapid-acting" // change this to "ultra-rapid" for Fiasp, or "bilinear" for old curve , useCustomPeakTime: false // allows changing insulinPeakTime , insulinPeakTime: 75 // number of minutes after a bolus activity peaks. defaults to 55m for Fiasp if useCustomPeakTime: false From 06f2ed486ee6800ab707baf92429af6ef17adfe7 Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Sat, 5 Sep 2020 05:55:16 +0200 Subject: [PATCH 47/66] add mills to status upload to reduce NS client workload (#1385) --- bin/ns-status.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/bin/ns-status.js b/bin/ns-status.js index 6e67ace28..4d0543fe1 100755 --- a/bin/ns-status.js +++ b/bin/ns-status.js @@ -3,6 +3,7 @@ var os = require("os"); var fs = require('fs'); +var moment = require("moment"); var requireUtils = require('../lib/require-utils'); var requireWithTimestamp = requireUtils.requireWithTimestamp; @@ -129,6 +130,7 @@ var ns_status = function ns_status(argv_params) { if (iobArray && iobArray.length) { iob = iobArray[0]; iob.timestamp = iob.time; + iob.mills = moment(iob.time).valueOf(); delete iob.time; } @@ -141,6 +143,14 @@ var ns_status = function ns_status(argv_params) { } } + if (enacted && enacted.timestamp) { + enacted.mills = moment(enacted.timestamp).valueOf(); + } + + if (suggested && suggested.timestamp) { + suggested.mills = moment(suggested.timestamp).valueOf(); + } + var status = { device: 'openaps://' + os.hostname(), openaps: { From 283598684bf88f353faa9bc9f6ce3f2cd5d5efe0 Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Sat, 12 Sep 2020 23:21:37 +0200 Subject: [PATCH 48/66] correct ns sgv query (#1386) --- bin/nightscout.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/nightscout.sh b/bin/nightscout.sh index 32f9f6b51..594439159 100755 --- a/bin/nightscout.sh +++ b/bin/nightscout.sh @@ -237,7 +237,7 @@ ns) | openaps use ${ZONE} select --date dateString --current now --gaps - ${FILE} | jq . 
;; latest-entries-time) - PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type]=sgv' | jq .[0]) + PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type][$eq]=sgv' | jq .[0]) test -z "${PREVIOUS_TIME}" && echo -n 0 || echo $PREVIOUS_TIME | jq .dateString exit 0 ;; From 37a50e44bf2dfe7c0cd299f766b86264e1d850f9 Mon Sep 17 00:00:00 2001 From: Sarah King Date: Fri, 18 Sep 2020 16:29:14 -0500 Subject: [PATCH 49/66] Fix issue 1377, install correct version of py-parsedatetime (#1387) --- bin/oref0-setup.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index d2298967e..e3f88cf1b 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -702,7 +702,8 @@ if prompt_yn "" N; then echo Removing any existing git in $directory/.git rm -rf $directory/.git echo Removed any existing git - + echo "Uninstalling parsedatetime, reinstalling correct version" + pip uninstall -y parsedatetime && pip install -I parsedatetime===2.5 # TODO: delete this after openaps 0.2.2 release echo Checking openaps 0.2.2 installation with --nogit support if ! openaps --version 2>&1 | egrep "0.[2-9].[2-9]"; then From 5635387c1e3276b1955864181935230863acbbc9 Mon Sep 17 00:00:00 2001 From: Travis Cannell Date: Sat, 21 Nov 2020 20:10:27 -0800 Subject: [PATCH 50/66] equal sign typo (#1388) Recently I re-ran the setup script on DEV and ran into the parsedatetime error. I narrowed it down to what I think is a typo, there is a `===` when I believe `==` is what was meant. I modified this file locally and ran with no issues. --- bin/oref0-setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index e3f88cf1b..cd809f5e0 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -703,7 +703,7 @@ if prompt_yn "" N; then rm -rf $directory/.git echo Removed any existing git echo "Uninstalling parsedatetime, reinstalling correct version" - pip uninstall -y parsedatetime && pip install -I parsedatetime===2.5 + pip uninstall -y parsedatetime && pip install -I parsedatetime==2.5 # TODO: delete this after openaps 0.2.2 release echo Checking openaps 0.2.2 installation with --nogit support if ! openaps --version 2>&1 | egrep "0.[2-9].[2-9]"; then From 1d9e5f5526d566d16b19c971228c2912c3cb28dd Mon Sep 17 00:00:00 2001 From: Foxy7 <37312469+Foxy7@users.noreply.github.com> Date: Tue, 5 Jan 2021 23:16:41 +0000 Subject: [PATCH 51/66] Update app.py to show sgv via workaround (#1391) As per @renegadeandy pull request https://github.com/renegadeandy/oref0/pull/1 just tested it and it works nicely, so making PR for Dev. 
--- www/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/www/app.py b/www/app.py index c6cecb868..84c1333bb 100644 --- a/www/app.py +++ b/www/app.py @@ -59,7 +59,7 @@ def enacted(): @app.route("/glucose") def glucose(): - if os.path.getmtime(myopenaps_dir + "xdrip/glucose.json") > os.path.getmtime(myopenaps_dir + "monitor/glucose.json"): + if os.path.getmtime(myopenaps_dir + "xdrip/glucose.json") > os.path.getmtime(myopenaps_dir + "monitor/glucose.json") and os.path.getsize(myopenaps_dir + "xdrip/glucose.json") > 0: json_url = os.path.join(myopenaps_dir + "xdrip/glucose.json") else: json_url = os.path.join(myopenaps_dir + "monitor/glucose.json") From bec71edc72e9a59554a8227d625d8d3523925bae Mon Sep 17 00:00:00 2001 From: Jeremy Cunningham <34543464+jpcunningh@users.noreply.github.com> Date: Mon, 19 Apr 2021 21:55:31 -0500 Subject: [PATCH 52/66] Fix oref0-ns-loop.sh pushover_snooze NS authorization (#1393) Co-authored-by: Jeremy Cunningham --- bin/oref0-ns-loop.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index 216bc2048..a47bdfa5e 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -41,7 +41,13 @@ EOT function pushover_snooze { URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100 - if snooze=$(curl -s $URL | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then + if [[ "${API_SECRET}" =~ "token=" ]]; then + URL="${URL}&${API_SECRET}" + else + CURL_AUTH='-H api-secret:'${API_SECRET} + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then #echo $snooze #echo date -Is -d @$snooze; echo touch -d $(date -Is -d @$snooze) monitor/pushover-sent From 839a2502fa47c46230d1575becaaa876f433fa7b Mon Sep 17 00:00:00 2001 From: petervanrijt Date: Thu, 22 Jul 2021 09:33:46 +0200 Subject: [PATCH 53/66] Improve error message on Autotune run without BG-data (#1404) * Better error handling for Autotune without BG-data Prevents technical error on the logging which users of Autotune(Web) see. * Improve error message on Autotune without BG-data Informs users on where to start investigation. --- bin/oref0-autotune-prep.js | 1 + bin/oref0-autotune.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/oref0-autotune-prep.js b/bin/oref0-autotune-prep.js index 2ce3082d5..3a70bb075 100755 --- a/bin/oref0-autotune-prep.js +++ b/bin/oref0-autotune-prep.js @@ -104,6 +104,7 @@ if (!module.parent) { var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8')); } catch (e) { console.error("Warning: could not parse "+glucose_input); + return console.error("Warning: could not parse "+glucose_input", e); } var carb_data = { }; diff --git a/bin/oref0-autotune.sh b/bin/oref0-autotune.sh index ac3c53a36..393a87303 100755 --- a/bin/oref0-autotune.sh +++ b/bin/oref0-autotune.sh @@ -248,7 +248,7 @@ do cp profile.pump.json profile.json exit else - die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json" + die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json. Make sure Nightscout contains BG-values for the selected date range, Autotune(Web) does not work without BG-values. See documentation on the how-to check http://nightscout.github.io/nightscout/reports/#day-to-day ." 
fi else # Copy tuned profile produced by autotune to profile.json for use with next day of data From 7157ef497a14f15d643a2b93ceee244701e575d5 Mon Sep 17 00:00:00 2001 From: Sarah Withee <2601974+geekygirlsarah@users.noreply.github.com> Date: Sat, 16 Oct 2021 23:32:50 -0400 Subject: [PATCH 54/66] Fix SyntaxError for unexpected token on console.error (#1410) --- bin/oref0-autotune-prep.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bin/oref0-autotune-prep.js b/bin/oref0-autotune-prep.js index 3a70bb075..8977a570a 100755 --- a/bin/oref0-autotune-prep.js +++ b/bin/oref0-autotune-prep.js @@ -103,8 +103,7 @@ if (!module.parent) { try { var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+glucose_input); - return console.error("Warning: could not parse "+glucose_input", e); + return console.error("Warning: could not parse "+glucose_input, e); } var carb_data = { }; From 1954758ca99a45ce9c3a37a60978207f20540aa4 Mon Sep 17 00:00:00 2001 From: Foxy7 <37312469+Foxy7@users.noreply.github.com> Date: Sat, 11 Dec 2021 03:14:17 +0000 Subject: [PATCH 55/66] rewind, prime and battery indicates change (#1409) * rewind, prime and battery indicates change * rewind, prime and battery indicates change * "fixed" prime only added filter for "fixed" primes only to count as site change. therefore manual primes will be ignored. --- bin/mm-format-ns-treatments.sh | 10 +++++++++- examples/profile.json | 3 +++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/bin/mm-format-ns-treatments.sh b/bin/mm-format-ns-treatments.sh index 8fac046c5..d104254dd 100755 --- a/bin/mm-format-ns-treatments.sh +++ b/bin/mm-format-ns-treatments.sh @@ -18,6 +18,11 @@ EOT # | json -e "this.type = 'mm://openaps/$self'" \ model=$(jq -r . $MODEL) +#load ns event preferences +PREF=${4-preferences.json} +rewind_indicates_cartridge_change=$(jq -r .rewind_indicates_cartridge_change $PREF) +prime_indicates_pump_site_change=$(jq -r .prime_indicates_pump_site_change $PREF) +battery_indicates_battery_change=$(jq -r .battery_indicates_battery_change $PREF) run_remote_command "oref0-normalize-temps $HISTORY" \ | jq '[ .[] @@ -25,10 +30,13 @@ run_remote_command "oref0-normalize-temps $HISTORY" \ | .created_at = if .created_at then .created_at else .timestamp end | .enteredBy = "openaps://medtronic/'$model'" | if .glucose and (.glucoseType | not) and .glucose > 0 then .glucoseType = .enteredBy else . end + | if ._type == "Rewind" and "'$rewind_indicates_cartridge_change'" == "true" then .eventType = "Insulin Change" else . end + | if ._type == "Prime" and .type == "fixed" and "'$prime_indicates_pump_site_change'" == "true" then .eventType = "Site Change" else . end + | if ._type == "Battery" and "'$battery_indicates_battery_change'" == "true" then .eventType = "Pump Battery Change" else . end | .eventType = if .eventType then .eventType else "Note" end | if ._type == "AlarmSensor" and .alarm_description then .notes = .alarm_description else . end | ( if .notes then .notes else "" end ) as $note - | if ( .eventType == "Note" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . end + | if ( .eventType == "Note" or .eventType == "Insulin Change" or .eventType == "Site Change" or .eventType == "Pump Battery Change" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . 
end ]' \ > $OUTPUT diff --git a/examples/profile.json b/examples/profile.json index a79a4119d..125c7e96f 100644 --- a/examples/profile.json +++ b/examples/profile.json @@ -63,6 +63,9 @@ "enableSMB_with_COB": true, "enableSMB_with_temptarget": false, "enableSMB_after_carbs": true, + "prime_indicates_pump_site_change": false, + "rewind_indicates_cartridge_change": false, + "battery_indicates_battery_change": false, "maxSMBBasalMinutes": 75, "curve": "rapid-acting", "useCustomPeakTime": false, From 1ec3528c1f78fa567698bcf8c85b513e279d819e Mon Sep 17 00:00:00 2001 From: James Babcock Date: Fri, 10 Dec 2021 19:17:52 -0800 Subject: [PATCH 56/66] Bugfixes (#1411) * Add config settings for low-battery shutdown thresholds Adds a pair of config settings for low-battery shutdown thresholds. One config setting is for Edison (denominated in millivolts), the other is for Pi (given as percentage). This config setting is needed for USB-powered Pi rigs, where the battery level indicator just returns a random number, which might occasionally be zero. * If unable to update with git pull, prompt before aborting This is mainly for convenience during development; a checkout in some weird branch doesn't necessarily return success when you "git pull", but that doesn't necessarily mean you want to abort oref0-runagain.sh. * Fix bug that would clobber sshd_config Depending on the initial contents of sshd_config, this would sometimes try to add a line to it, but instead completely overwrite it to contain only that line. * Extend timeout on JS syntax check unit tests On Pi Zero hardware, these tests would sometimes time out because starting the nodejs interpreter is slow (even with a fixed nodejs interpreter). Extend the timeout to 4s, from the default of 2s, so they pass. * Clean up require() usage in IOB unit test, making it pass without timeout on Pi Zero * Add bash-unit-test-temp to gitignore * Check for bad (super slow) RPi nodejs versions, install nvm version if found * Tweak log messages about battery level --- .gitignore | 3 + bin/openaps-install.sh | 2 +- bin/oref0-cron-every-15min.sh | 16 +++++- bin/oref0-setup.sh | 56 +++++++++++++++--- lib/profile/index.js | 6 +- tests/check-syntax.test.js | 3 +- tests/iob.test.js | 105 +++++++++++++++++----------------- 7 files changed, 125 insertions(+), 66 deletions(-) diff --git a/.gitignore b/.gitignore index 4638eb4bc..a24874fdb 100644 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,6 @@ bin/__pycache__ package-lock.json *.pyc + +bash-unit-test-temp + diff --git a/bin/openaps-install.sh b/bin/openaps-install.sh index dfad4db51..e268146ae 100755 --- a/bin/openaps-install.sh +++ b/bin/openaps-install.sh @@ -58,7 +58,7 @@ apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true install -y sudo strace tcpdump screen acpid vim python-pip locate ntpdate ntp #check if edison user exists before trying to add it to groups -grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" > /etc/ssh/sshd_config +grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" >>/etc/ssh/sshd_config if getent passwd edison > /dev/null; then echo "Adding edison to sudo users" diff --git a/bin/oref0-cron-every-15min.sh b/bin/oref0-cron-every-15min.sh index 00a56e113..b2b5a4a38 100755 --- a/bin/oref0-cron-every-15min.sh +++ b/bin/oref0-cron-every-15min.sh @@ -12,12 +12,24 @@ assert_cwd_contains_ini # proper shutdown once the EdisonVoltage very low (< 
3050mV; 2950 is dead) if is_edison; then - sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage | awk '{if ($1<=3050)system("sudo shutdown -h now")}' & + BATTERY_VOLTAGE="$(sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage)" + echo "Battery voltage is $BATTERY_VOLTAGE." + BATTERY_CUTOFF=$(get_pref_float .edison_battery_shutdown_voltage 3050) + if (( "$BATTERY_VOLTAGE" <= "$BATTERY_CUTOFF" )); then + echo "Critically low battery! Shutting down." + sudo shutdown -h now + fi fi # proper shutdown of pi rigs once the battery level is below 2 % (should be more than enough to shut down on a standard 18600 ~2Ah cell) if is_pi; then - sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery | awk '{if ($1<2)system("sudo shutdown -h now")}' & + BATTERY_PERCENT="$(sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery)" + BATTERY_CUTOFF=$(get_pref_float .pi_battery_shutdown_percent 2) + echo "Battery level is $BATTERY_PERCENT percent" + if (( "$BATTERY_PERCENT" < "$BATTERY_CUTOFF" )); then + echo "Critically low battery! Shutting down." + sudo shutdown -h now + fi fi # temporarily disable hotspot for 1m every 15m to allow it to try to connect via wifi again diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index cd809f5e0..e996aa410 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -292,6 +292,47 @@ function move_mmtune () { fi } +function install_or_upgrade_nodejs () { + # install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed + if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' >/dev/null; then + echo Installing node 8 + # Use nodesource setup script to add nodesource repository to sources.list.d + sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" + # Install nodejs and npm from nodesource + sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" + fi + + # Check that the nodejs you have installed is not broken. In particular, we're + # checking for a problem with nodejs binaries that are present in the apt-get + # repo for RaspiOS builds from mid-2021 and earlier, where the node interpreter + # works, but has a 10x slower startup than expected (~30s on Pi Zero W + # hardware, as opposed to ~3s using a statically-linked binary of the same + # binary sourced from nvm). + sudo apt-get install -y time + NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)" + if [ 1 -eq "$(echo "$NODE_EXECUTION_TIME > 10" |bc)" ]; then + echo "Your installed nodejs ($(node --version)) is very slow to start (took ${NODE_EXECUTION_TIME}s)" + echo "This is a known problem with certain versions of Raspberry Pi OS." + + if prompt_yn "Install a new nodejs version using nvm?" Y; then + echo "Installing nvm and using it to replace the system-provided nodejs" + + # Download nvm + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash + # Run nvm, adding its aliases to this shell + source ~/.nvm/nvm.sh + # Use nvm to install nodejs + nvm install 10.24.1 + # Symlink node into /usr/local/bin, where it will shadow /usr/bin/node + ln -s ~/.nvm/versions/node/v10.24.1/bin/node /usr/local/bin/node + + NEW_NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)" + echo "New nodejs took ${NEW_NODE_EXECUTION_TIME}s to start" + fi + else + echo "Your installed nodejs version is OK." + fi +} if ! 
validate_cgm "${CGM}"; then DIR="" # to force a Usage prompt @@ -689,14 +730,7 @@ if prompt_yn "" N; then echo Running apt-get autoclean sudo apt-get autoclean - # install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed - if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' ; then - echo Installing node 8 - # Use nodesource setup script to add nodesource repository to sources.list.d - sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" - # Install nodejs and npm from nodesource - sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" - fi + install_or_upgrade_nodejs # Attempting to remove git to make install --nogit by default for existing users echo Removing any existing git in $directory/.git @@ -742,7 +776,11 @@ if prompt_yn "" N; then mkdir -p $HOME/src/ if [ -d "$HOME/src/oref0/" ]; then echo "$HOME/src/oref0/ already exists; pulling latest" - (cd $HOME/src/oref0 && git fetch && git pull) || die "Couldn't pull latest oref0" + (cd $HOME/src/oref0 && git fetch && git pull) || ( + if ! prompt_yn "Couldn't pull latest oref0. Continue anyways?"; then + die "Failed to update oref0." + fi + ) else echo -n "Cloning oref0: " (cd $HOME/src && git clone git://github.com/openaps/oref0.git) || die "Couldn't clone oref0" diff --git a/lib/profile/index.js b/lib/profile/index.js index 2b3cf5d84..fc15f1b94 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -66,7 +66,9 @@ function defaults ( ) { //, maxRaw: 200 // highest raw/noisy CGM value considered safe to use for looping , calc_glucose_noise: false , target_bg: false // set to an integer value in mg/dL to override pump min_bg - }; + , edison_battery_shutdown_voltage: 3050 + , pi_battery_shutdown_percent: 2 + } } function displayedDefaults () { @@ -87,6 +89,8 @@ function displayedDefaults () { profile.enableUAM = allDefaults.enableUAM; profile.curve = allDefaults.curve; profile.offline_hotspot = allDefaults.offline_hotspot; + profile.edison_battery_shutdown_voltage = allDefaults.edison_battery_shutdown_voltage; + profile.pi_battery_shutdown_percent = allDefaults.pi_battery_shutdown_percent; console.error(profile); return profile diff --git a/tests/check-syntax.test.js b/tests/check-syntax.test.js index 9f52e1240..ba0da571c 100644 --- a/tests/check-syntax.test.js +++ b/tests/check-syntax.test.js @@ -93,8 +93,9 @@ describe("Syntax checks", function() { var type = getFileFormat(file); if(type !== "unknown") { it(file, function() { + this.timeout(4000); checkFile(file, type); }); } }); -}); \ No newline at end of file +}); diff --git a/tests/iob.test.js b/tests/iob.test.js index e11476a04..260924536 100644 --- a/tests/iob.test.js +++ b/tests/iob.test.js @@ -3,6 +3,7 @@ require('should'); var moment = require('moment'); +var iob = require('../lib/iob'); describe('IOB', function() { @@ -34,13 +35,13 @@ describe('IOB', function() { }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(2); //rightAfterBolus.bolussnooze.should.equal(2); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1.45); //hourLater.bolussnooze.should.be.lessThan(.5); hourLater.iob.should.be.greaterThan(0); @@ -49,7 +50,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + 
(3 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -83,14 +84,14 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(2); //rightAfterBolus.bolussnooze.should.equal(2); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1.6); hourLater.iob.should.be.greaterThan(1.3); @@ -101,7 +102,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); }); @@ -135,13 +136,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.75); //hourLater.bolussnooze.should.be.lessThan(0.75); hourLater.iob.should.be.greaterThan(0); @@ -150,7 +151,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -186,13 +187,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.77); //hourLater.bolussnooze.should.be.lessThan(0.36); hourLater.iob.should.be.greaterThan(0.72); @@ -203,7 +204,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -239,13 +240,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.81); //hourLater.bolussnooze.should.be.lessThan(0.5); hourLater.iob.should.be.greaterThan(0.76); @@ -257,7 +258,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 
* 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -293,13 +294,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.59); //hourLater.bolussnooze.should.be.lessThan(0.23); @@ -311,7 +312,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (6 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -345,20 +346,20 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.8); //hourLater.bolussnooze.should.be.lessThan(.8); hourLater.iob.should.be.greaterThan(0); var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -395,13 +396,13 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (4 * 60 * 60 * 1000)).toISOString(); - var hourLaterWith5 = require('../lib/iob')(hourLaterInputs)[0]; + var hourLaterWith5 = iob(hourLaterInputs)[0]; console.error(hourLaterWith5.iob); hourLaterInputs.profile.dia = 3; - var hourLaterWith4 = require('../lib/iob')(hourLaterInputs)[0]; + var hourLaterWith4 = iob(hourLaterInputs)[0]; console.error(hourLaterWith4.iob); @@ -427,7 +428,7 @@ describe('IOB', function() { //var snoozeInputs = inputs; //snoozeInputs.clock = new Date(now + (20 * 60 * 1000)).toISOString(); - //var snooze = require('../lib/iob')(snoozeInputs)[0]; + //var snooze = iob(snoozeInputs)[0]; //snooze.bolussnooze.should.equal(0); //}); @@ -476,7 +477,7 @@ describe('IOB', function() { }; var iobInputs = inputs; - var iobNow = require('../lib/iob')(iobInputs)[0]; + var iobNow = iob(iobInputs)[0]; //console.log(iobNow); iobNow.iob.should.be.lessThan(1); @@ -535,7 +536,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 01:30:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.5); hourLater.iob.should.be.greaterThan(0.4); }); @@ -599,7 +600,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 00:45:00.000'); //new Date(now + (30 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.8); 
hourLater.iob.should.be.greaterThan(0.7); @@ -640,7 +641,7 @@ describe('IOB', function() { } }; - var hourLater = require('../lib/iob')(inputs)[0]; + var hourLater = iob(inputs)[0]; var timestampEarly2 = startingPoint.clone().subtract(29, 'minutes'); var timestampEarly3 = startingPoint.clone().subtract(28, 'minutes'); @@ -669,7 +670,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 00:30:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; var inputs = { clock: timestamp, @@ -713,7 +714,7 @@ describe('IOB', function() { } }; - var hourLaterWithOverlap = require('../lib/iob')(inputs)[0]; + var hourLaterWithOverlap = iob(inputs)[0]; hourLater.iob.should.be.greaterThan(hourLaterWithOverlap.iob - 0.05); hourLater.iob.should.be.lessThan(hourLaterWithOverlap.iob + 0.05); @@ -774,7 +775,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-14 00:45:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); hourLater.iob.should.be.greaterThan(0.8); @@ -832,7 +833,7 @@ describe('IOB', function() { var iobInputs = inputs; // Calculate IOB with inputs that will be the same as - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -867,7 +868,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -933,7 +934,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -968,7 +969,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1035,7 +1036,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1090,7 +1091,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1146,7 +1147,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1180,7 +1181,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1237,7 +1238,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1268,7 +1269,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; 
iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1324,7 +1325,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1353,7 +1354,7 @@ describe('IOB', function() { } }; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1411,7 +1412,7 @@ describe('IOB', function() { }; var hourLaterInputs = inputs; - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1460,7 +1461,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); hourLater.iob.should.be.greaterThan(0); @@ -1512,7 +1513,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0); hourLater.iob.should.be.greaterThan(-1); @@ -1542,7 +1543,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1579,7 +1580,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1612,25 +1613,25 @@ describe('IOB', function() { }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); //hourLater.bolussnooze.should.be.lessThan(.5); hourLater.iob.should.be.greaterThan(0); var after3hInputs = inputs; after3hInputs.clock = new Date(now + (3 * 60 * 60 * 1000)).toISOString(); - var after3h = require('../lib/iob')(after3hInputs)[0]; + var after3h = iob(after3hInputs)[0]; after3h.iob.should.be.greaterThan(0); var after4hInputs = inputs; after4hInputs.clock = new Date(now + (4 * 60 * 60 * 1000)).toISOString(); - var after4h = require('../lib/iob')(after4hInputs)[0]; + var after4h = iob(after4hInputs)[0]; after4h.iob.should.equal(0); }); From c454ecd84e35832ef42d49bef3db89ddb97ce08a Mon Sep 17 00:00:00 2001 From: Kyrylo Chekanov Date: Sat, 18 Dec 2021 05:57:36 +0200 Subject: [PATCH 57/66] per #1401 set highest sensitivity on low TT and halfBasal (#1403) getting multiplication less or equal to 0 means that we have a really low target with a really low halfBasalTarget with low TT and lowTTlowersSensitivity we need autosens_max as a value we use multiplication instead of the division to avoid "division by zero error" --- lib/determine-basal/determine-basal.js 
| 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index ce37b04b2..93eebec9b 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -244,7 +244,15 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // e.g.: Sensitivity ratio set to 0.8 based on temp target of 120; Adjusting basal from 1.65 to 1.35; ISF from 58.9 to 73.6 //sensitivityRatio = 2/(2+(target_bg-normalTarget)/40); var c = halfBasalTarget - normalTarget; - sensitivityRatio = c/(c+target_bg-normalTarget); + // getting multiplication less or equal to 0 means that we have a really low target with a really low halfBasalTarget + // with low TT and lowTTlowersSensitivity we need autosens_max as a value + // we use multiplication instead of the division to avoid "division by zero error" + if (c * (c + target_bg-normalTarget) <= 0.0) { + sensitivityRatio = profile.autosens_max; + } + else { + sensitivityRatio = c/(c+target_bg-normalTarget); + } // limit sensitivityRatio to profile.autosens_max (1.2x by default) sensitivityRatio = Math.min(sensitivityRatio, profile.autosens_max); sensitivityRatio = round(sensitivityRatio,2); From fe6e492bc764bd4f84ce7a0b2585526f6eb44ba8 Mon Sep 17 00:00:00 2001 From: Foxy7 <37312469+Foxy7@users.noreply.github.com> Date: Sat, 18 Dec 2021 04:13:31 +0000 Subject: [PATCH 58/66] replace git:// with https:// (#1398) --- README.md | 2 +- bin/openaps-install.sh | 2 +- bin/openaps-src.sh | 10 +++++----- bin/oref0-setup.sh | 10 +++++----- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 3644bdfed..2da8ca234 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ API_SECRET="..." NIGHTSCOUT_HOST=localhost:1337 ns-upload-entries 1.5 is installed @@ -1069,7 +1069,7 @@ if prompt_yn "" N; then echo "EdisonVoltage already installed" else echo "Installing EdisonVoltage" - cd $HOME/src && git clone -b master git://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull) + cd $HOME/src && git clone -b master https://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull) cd $HOME/src/EdisonVoltage make voltage fi @@ -1084,7 +1084,7 @@ if prompt_yn "" N; then echo Checking for BT Pebble Mac if [[ ! -z "$BT_PEB" ]]; then sudo pip install --default-timeout=1000 libpebble2 - sudo pip install --default-timeout=1000 --user git+git://github.com/mddub/pancreabble.git + sudo pip install --default-timeout=1000 --user git+https://github.com/mddub/pancreabble.git oref0-bluetoothup sudo rfcomm bind hci0 $BT_PEB do_openaps_import $HOME/src/oref0/lib/oref0-setup/pancreabble.json @@ -1169,7 +1169,7 @@ if prompt_yn "" N; then echo "i2c-dev" > /etc/modules-load.d/i2c.conf echo "Installing pi-buttons..." systemctl stop pi-buttons - cd $HOME/src && git clone git://github.com/bnielsen1965/pi-buttons.git + cd $HOME/src && git clone https://github.com/bnielsen1965/pi-buttons.git echo "Make and install pi-buttons..." cd pi-buttons cd src && make && sudo make install && sudo make install_service @@ -1180,7 +1180,7 @@ if prompt_yn "" N; then systemctl enable pi-buttons && systemctl restart pi-buttons echo "Installing openaps-menu..." test "$directory" != "/$HOME/myopenaps" && (echo You are using a non-standard openaps directory. 
For the statusmenu to work correctly you need to set the openapsDir variable in index.js) - cd $HOME/src && git clone git://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull) + cd $HOME/src && git clone https://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull) cd $HOME/src/openaps-menu && sudo npm install cp $HOME/src/openaps-menu/openaps-menu.service /etc/systemd/system/ && systemctl enable openaps-menu fi From d15d8d483c859f6c62ecd0d2dfab162fe3e3e730 Mon Sep 17 00:00:00 2001 From: Steven Bell Date: Fri, 17 Dec 2021 22:47:09 -0600 Subject: [PATCH 59/66] Enable SMB when BG greater than specified value (#1365) * Add high_bg enable SMB toggle Still need to implement UTs, but this is an initial commit to get eyes on and daytime testing. * Add logging Also move the enableSMB_always at the end as it originally was * Implement enableSMB_high_bg with dev * Update determine-basal.js Fix profile variable name for high bg target Co-authored-by: Steven Bell --- lib/determine-basal/determine-basal.js | 26 +++++++++++++++++++++++--- lib/profile/index.js | 2 ++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 93eebec9b..79ec61208 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -52,7 +52,9 @@ function enable_smb( profile, microBolusAllowed, meal_data, - target_bg + bg, + target_bg, + high_bg ) { // disable SMB when a high temptarget is set if (! microBolusAllowed) { @@ -105,7 +107,19 @@ function enable_smb( console.error("SMB enabled for temptarget of",convert_bg(target_bg, profile)); } return true; - } + } + + // enable SMB if high bg is found + if (profile.enableSMB_high_bg === true && high_bg !== null && bg >= high_bg) { + console.error("Checking BG to see if High for SMB enablement."); + console.error("Current BG", bg, " | High BG ", high_bg); + if (meal_data.bwFound) { + console.error("Warning: High BG SMB enabled within 6h of using Bolus Wizard: be sure to easy bolus 30s before using Bolus Wizard"); + } else { + console.error("High BG detected. 
Enabling SMB."); + } + return true; + } console.error("SMB disabled (no enableSMB preferences active or no condition satisfied)"); return false; @@ -215,12 +229,16 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var target_bg; var min_bg; var max_bg; + var high_bg; if (typeof profile.min_bg !== 'undefined') { min_bg = profile.min_bg; } if (typeof profile.max_bg !== 'undefined') { max_bg = profile.max_bg; } + if (typeof profile.enableSMB_high_bg_target !== 'undefined') { + high_bg = profile.enableSMB_high_bg_target; + } if (typeof profile.min_bg !== 'undefined' && typeof profile.max_bg !== 'undefined') { target_bg = (profile.min_bg + profile.max_bg) / 2; } else { @@ -434,7 +452,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ profile, microBolusAllowed, meal_data, - target_bg + bg, + target_bg, + high_bg ); // enable UAM (if enabled in preferences) diff --git a/lib/profile/index.js b/lib/profile/index.js index fc15f1b94..f4bdae9f7 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -45,6 +45,8 @@ function defaults ( ) { // if the CGM sensor reads falsely high and doesn't come down as actual BG does , enableSMB_always: false // always enable supermicrobolus (unless disabled by high temptarget) , enableSMB_after_carbs: false // enable supermicrobolus for 6h after carbs, even with 0 COB + , enableSMB_high_bg: false // enable SMBs when a high BG is detected, based on the high BG target (adjusted or profile) + , enableSMB_high_bg_target: 110 // set the value enableSMB_high_bg will compare against to enable SMB. If BG > than this value, SMBs should enable. // *** WARNING *** DO NOT USE enableSMB_always or enableSMB_after_carbs with Libre or similar. , allowSMB_with_high_temptarget: false // allow supermicrobolus (if otherwise enabled) even with high temp targets , maxSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB with uncovered COB From d8af80b1b347456cb24e53025aec780c53c01ae6 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Sat, 18 Dec 2021 07:37:09 +0200 Subject: [PATCH 60/66] Add a script to delete device status entries that are older than 2 months. (#1232) * Add a script to delete device status entries that are older than 2 months. Signed-off-by: Tzachi Dar * Fixes to delete old device status: 1) Create a local backup of the data. 2) Add number of days to delete as a parameter. 3) Add a nightly mode (parameters are given as enviorment variables). * Write the backedup records in one line. * New implmentation: Delete all entries in one command. * Fix enviorment variable for nightly mode. --- bin/ns-delete-old-devicestatus.sh | 60 +++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100755 bin/ns-delete-old-devicestatus.sh diff --git a/bin/ns-delete-old-devicestatus.sh b/bin/ns-delete-old-devicestatus.sh new file mode 100755 index 000000000..d7c5c8573 --- /dev/null +++ b/bin/ns-delete-old-devicestatus.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +usage "$@" < - No-op version, find out what delete would do. 
+$self delete - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup +$self nightly - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup +EOF + +function write_backup() { +json -a -o jsony-0 >> $BACKUP_DIR/devicestatus.txt +} + +export API_SECRET +test -n "$3" && API_SECRET=$(nightscout hash-api-secret $3) +test -n "$4" && NUM_DAYS=$4 +BACKUP_DIR="$HOME/myopenaps"/backup +mkdir -p $BACKUP_DIR + +ENDPOINT=$2/api/v1/devicestatus + +if [ $1 = "nightly" ]; then + test -n "$2" && NUM_DAYS=$2 + ENDPOINT=$NIGHTSCOUT_HOST/api/v1/devicestatus +fi + +if [[ -z "$API_SECRET" || -z "$NUM_DAYS" ]] ; then + test -z "$API_SECRET" && echo API_SECRET undefined. + test -z "$NUM_DAYS" && echo NUM_DAYS undefined. + print_usage + exit 1; +fi + +date_string=$(date -d "-$NUM_DAYS days" +%Y-%m-%d) +fetch_cmd="curl --compressed -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000" +delete_cmd="curl -X DELETE -H \"API-SECRET: $API_SECRET\" -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000" + +case "$1" in + --find) + echo $fetch_cmd + echo $delete_cmd + ;; + delete) + #echo $fetch_cmd + #echo $delete_cmd + eval $fetch_cmd | write_backup + eval $delete_cmd + ;; + nightly) + #echo $fetch_cmd + #echo $delete_cmd + eval $fetch_cmd | write_backup + eval $delete_cmd + ;; + *|help|--help|-h) + print_usage + exit 1; + ;; +esac From cb133de2c790eccf02ae8648e67c3d00c6590b5c Mon Sep 17 00:00:00 2001 From: Haroon Ghori <65193601+hsghori@users.noreply.github.com> Date: Sat, 18 Dec 2021 10:53:46 -0800 Subject: [PATCH 61/66] Fix python script and allow autotune prep/core to write to file directly (#1394) --- bin/oref0-autotune-core.js | 16 +++- bin/oref0-autotune-prep.js | 14 ++- bin/oref0-autotune.py | 171 ++++++++++++++++++------------------- requirements.txt | 3 + 4 files changed, 107 insertions(+), 97 deletions(-) create mode 100644 requirements.txt diff --git a/bin/oref0-autotune-core.js b/bin/oref0-autotune-core.js index 439e8ac0d..be51dae7b 100755 --- a/bin/oref0-autotune-core.js +++ b/bin/oref0-autotune-core.js @@ -5,7 +5,7 @@ Uses the output of oref0-autotune-prep.js - Calculates adjustments to basal schedule, ISF, and CSF + Calculates adjustments to basal schedule, ISF, and CSF Released under MIT license. See the accompanying LICENSE.txt file for full terms and conditions @@ -19,13 +19,17 @@ THE SOFTWARE. 
*/ - var autotune = require('../lib/autotune'); var stringify = require('json-stable-stringify'); if (!module.parent) { var argv = require('yargs') - .usage("$0 ") + .usage("$0 [--output-file=]") + .option('output-file', { + alias: 'o', + describe: 'File to write output', + default: null, + }) .demand(3) .strict(true) .help('help'); @@ -65,6 +69,10 @@ if (!module.parent) { }; var autotune_output = autotune(inputs); - console.log(stringify(autotune_output, { space: ' '})); + if (params["output-file"]) { + fs.writeFileSync(params["output-file"], stringify(autotune_output, {space: ' '})); + } else { + console.log(stringify(autotune_output, { space: ' '})); + } } diff --git a/bin/oref0-autotune-prep.js b/bin/oref0-autotune-prep.js index 8977a570a..4c781cb5c 100755 --- a/bin/oref0-autotune-prep.js +++ b/bin/oref0-autotune-prep.js @@ -27,7 +27,7 @@ var moment = require('moment'); if (!module.parent) { var argv = require('yargs') - .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve]") + .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve] [--output-file=]") .option('categorize_uam_as_basal', { alias: 'u', boolean: true, @@ -40,6 +40,11 @@ if (!module.parent) { describe: "Tune peak time and end time", default: false }) + .option('output-file', { + alias: 'o', + describe: 'Output file to write output', + default: null, + }) .strict(true) .help('help'); @@ -66,7 +71,6 @@ if (!module.parent) { console.log('{ "error": "Could not parse input data" }'); return console.error("Could not parse input data: ", e); } - var pumpprofile_data = { }; if (typeof pumpprofile_input !== 'undefined') { try { @@ -129,6 +133,10 @@ if (!module.parent) { }; var prepped_glucose = generate(inputs); - console.log(JSON.stringify(prepped_glucose)); + if (params['output-file']) { + fs.writeFileSync(params['output-file'], JSON.stringify(prepped_glucose)) + } else { + console.log(JSON.stringify(prepped_glucose)); + } } diff --git a/bin/oref0-autotune.py b/bin/oref0-autotune.py index c936641c1..a6ec91989 100755 --- a/bin/oref0-autotune.py +++ b/bin/oref0-autotune.py @@ -2,15 +2,15 @@ # Python version of oref0-autotune.sh # Original bash code: scottleibrand, pietergit, beached, danamlewis -# This script sets up an easy test environment for autotune, allowing the user to vary parameters +# This script sets up an easy test environment for autotune, allowing the user to vary parameters # like start/end date and number of runs. 
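#
# For reference, a hypothetical invocation might look like the following -- the host,
# directory, and dates are placeholders, but the flags correspond to the inputs listed
# below and the argparse options defined in get_input_arguments():
#   oref0-autotune.py --dir=~/myopenaps --ns-host=https://mynightscout.example.com \
#       --start-date=2021-12-01 --end-date=2021-12-07 --runs=3
#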
# -# Required Inputs: +# Required Inputs: # DIR, (--dir=) # NIGHTSCOUT_HOST, (--ns-host=) # Optional Inputs: -# END_DATE, (--end-date=) +# END_DATE, (--end-date=) # if no end date supplied, assume we want a months worth or until day before current day # NUMBER_OF_RUNS (--runs=) # if no number of runs designated, then default to 5 @@ -25,29 +25,22 @@ import datetime import os, errno import logging +import pytz from subprocess import call import shutil +import six -DIR = '' -NIGHTSCOUT_HOST = '' -START_DATE = datetime.datetime.today() - datetime.timedelta(days=1) -END_DATE = datetime.datetime.today() -NUMBER_OF_RUNS = 1 -EXPORT_EXCEL = None -TERMINAL_LOGGING = True -RECOMMENDS_REPORT = True - def get_input_arguments(): parser = argparse.ArgumentParser(description='Autotune') - + # Required # NOTE: As the code runs right now, this directory needs to exist and as well as the subfolders: autotune, settings parser.add_argument('--dir', '-d', type=str, required=True, - help='(--dir=)') + help='(--dir=)') parser.add_argument('--ns-host', '-n', type=str, @@ -73,56 +66,46 @@ def get_input_arguments(): '-x', type=str, metavar='EXPORT_EXCEL', - help='(--xlsx=)') + help='(--xlsx=)') parser.add_argument('--log', '-l', - type=str, + type=bool, + default=True, metavar='TERMINAL_LOGGING', help='(--log )') - + return parser.parse_args() def assign_args_to_variables(args): # TODO: Input checking. - - global DIR, NIGHTSCOUT_HOST, START_DATE, END_DATE, NUMBER_OF_RUNS, \ - EXPORT_EXCEL, TERMINAL_LOGGING, RECOMMENDS_REPORT - + # On Unix and Windows, return the argument with an initial component of # ~ or ~user replaced by that user's home directory. - DIR = os.path.expanduser(args.dir) - - NIGHTSCOUT_HOST = args.ns_host - - START_DATE = args.start_date - - if args.end_date is not None: - END_DATE = args.end_date - - if args.runs is not None: - NUMBER_OF_RUNS = args.runs - - if args.xlsx is not None: - EXPORT_EXCEL = args.xlsx - - if args.log is not None: - RECOMMENDS_REPORT = args.logs - -def get_nightscout_profile(nightscout_host): + directory = os.path.expanduser(args.dir) + nightscout_host = args.ns_host + start_date = args.start_date + end_date = args.end_date or datetime.datetime.today() + number_of_runs = args.runs or 1 + export_excel = args.xlsx + recommends_report = args.log + + return directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report + +def get_nightscout_profile(nightscout_host, directory): #TODO: Add ability to use API secret for Nightscout. res = requests.get(nightscout_host + '/api/v1/profile.json') - with open(os.path.join(autotune_directory, 'nightscout.profile.json'), 'w') as f: # noqa: F821 - f.write(res.text) + with open(os.path.join(directory, 'autotune', 'nightscout.profile.json'), 'w') as f: # noqa: F821 + f.write(six.ensure_str(res.text, encoding='utf-8')) def get_openaps_profile(directory): shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'autotune', 'profile.pump.json')) - + # If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json - + # This allows manual users to be able to run autotune by simply creating a settings/pumpprofile.json file. # cp -up settings/pumpprofile.json settings/profile.json shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'settings', 'profile.json')) - + # TODO: Get this to work. For now, just copy from settings/profile.json each time. 
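    # (Hypothetical sketch of that TODO, mirroring the commented-out bash one-liner below:
    #  reuse settings/autotune.json only when it exists and still contains a basal schedule --
    #  the bash version just greps for "start" -- otherwise keep the pump-derived profile.json:
    #      autotune_json = os.path.join(directory, 'settings', 'autotune.json')
    #      try:
    #          with open(autotune_json) as f:
    #              if '"start"' in f.read():
    #                  shutil.copy(autotune_json, os.path.join(directory, 'autotune', 'profile.json'))
    #      except IOError:
    #          pass  # no usable previous autotune.json; keep the copy made above
    #  )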
# If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json # cp settings/autotune.json autotune/profile.json && cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json @@ -130,26 +113,34 @@ def get_openaps_profile(directory): # print create_autotune_json # call(create_autotune_json, shell=True) - # cp settings/autotune.json autotune/profile.json + # cp settings/profile.json settings/autotune.json shutil.copy(os.path.join(directory, 'settings', 'profile.json'), os.path.join(directory, 'settings', 'autotune.json')) - + # cp settings/autotune.json autotune/profile.json shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'profile.json')) - + + # cp settings/autotune.json autotune/pumpprofile.json + shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'pumpprofile.json')) + #TODO: Do the correct copying here. # cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json']) def get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory): logging.info('Grabbing NIGHTSCOUT treatments.json for date range: {0} to {1}'.format(start_date, end_date)) - # TODO: What does 'T20:00-05:00' mean? output_file_name = os.path.join(directory, 'autotune', 'ns-treatments.json') - start_date = start_date.strftime("%Y-%m-%d") + 'T20:00-05:00' - end_date = end_date.strftime("%Y-%m-%d") + 'T20:00-05:00' + + def _normalize_datetime(dt): + dt = dt.replace(hour=20, minute=0, second=0, microsecond=0, tzinfo=None) + dt = pytz.timezone('US/Eastern').localize(dt) + return dt + + start_date = _normalize_datetime(start_date) + end_date = _normalize_datetime(end_date) url='{0}/api/v1/treatments.json?find\[created_at\]\[\$gte\]=`date --date="{1} -4 hours" -Iminutes`&find\[created_at\]\[\$lte\]=`date --date="{2} +1 days" -Iminutes`'.format(nightscout_host, start_date, end_date) #TODO: Add ability to use API secret for Nightscout. res = requests.get(url) with open(output_file_name, 'w') as f: - f.write(res.text.encode('utf-8')) + f.write(six.ensure_str(res.text, 'utf-8')) def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory): logging.info('Grabbing NIGHTSCOUT enries/sgv.json for date range: {0} to {1}'.format(start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))) @@ -161,50 +152,50 @@ def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory): #TODO: Add ability to use API secret for Nightscout. 
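    # (Hypothetical sketch for that TODO, assuming hashlib is imported: Nightscout
    #  normally accepts the SHA-1 hex digest of the API secret in an "api-secret"
    #  header, so supporting secured sites could look roughly like
    #      headers = {'api-secret': hashlib.sha1(api_secret.encode()).hexdigest()}
    #      res = requests.get(url, headers=headers)
    #  where api_secret would be a new, optional argument to this function.)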
res = requests.get(url) with open(os.path.join(directory, 'autotune', 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d"))), 'w') as f: - f.write(res.text.encode('utf-8')) + f.write(six.ensure_str(res.text, 'utf-8')) def run_autotune(start_date, end_date, number_of_runs, directory): date_list = [start_date + datetime.timedelta(days=x) for x in range(0, (end_date - start_date).days)] autotune_directory = os.path.join(directory, 'autotune') + FNULL = open(os.devnull, 'w') for run_number in range(1, number_of_runs + 1): for date in date_list: # cp profile.json profile.$run_number.$i.json shutil.copy(os.path.join(autotune_directory, 'profile.json'), os.path.join(autotune_directory, 'profile.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d")))) - - # Autotune Prep (required args, ), output prepped glucose + + # Autotune Prep (required args, ), output prepped glucose # data or below # oref0-autotune-prep ns-treatments.json profile.json ns-entries.$DATE.json > autotune.$RUN_NUMBER.$DATE.json ns_treatments = os.path.join(autotune_directory, 'ns-treatments.json') profile = os.path.join(autotune_directory, 'profile.json') + pump_profile = os.path.join(autotune_directory, "pumpprofile.json") ns_entries = os.path.join(autotune_directory, 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d"))) - autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries) - - # autotune.$RUN_NUMBER.$DATE.json + + # autotune.$RUN_NUMBER.$DATE.json autotune_run_filename = os.path.join(autotune_directory, 'autotune.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d"))) - with open(autotune_run_filename, "w+") as output: - logging.info('Running {script}'.format(script=autotune_prep)) - call(autotune_prep, stdout=output, shell=True) - logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) - - # Autotune (required args, ), + autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries} {pump_profile} --output-file {autotune_run_filename}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries, pump_profile=pump_profile, autotune_run_filename=autotune_run_filename) + logging.info('Running {script}'.format(script=autotune_prep)) + call(autotune_prep, stdout=FNULL, shell=True) + logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) + + # Autotune (required args, ), # output autotuned profile or what will be used as in the next iteration # oref0-autotune-core autotune.$RUN_NUMBER.$DATE.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json - + # oref0-autotune-core autotune.$run_number.$i.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json profile_pump = os.path.join(autotune_directory, 'profile.pump.json') - autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename) - + # newprofile.$RUN_NUMBER.$DATE.json newprofile_run_filename = os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d"))) - with open(newprofile_run_filename, "w+") as output: - logging.info('Running {script}'.format(script=autotune_core)) - call(autotune_core, stdout=output, shell=True) - logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) - 
+ autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump} --output-file {newprofile_run_filename}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename, newprofile_run_filename=newprofile_run_filename) + logging.info('Running {script}'.format(script=autotune_core)) + call(autotune_core, stdout=FNULL, shell=True) + logging.info('Writing output to {filename}'.format(filename=newprofile_run_filename)) + # Copy tuned profile produced by autotune to profile.json for use with next day of data # cp newprofile.$RUN_NUMBER.$DATE.json profile.json shutil.copy(os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json'.format(run_number=run_number, date=date.strftime("%Y-%m-%d"))), @@ -218,13 +209,13 @@ def create_summary_report_and_display_results(output_directory): print() print("Autotune pump profile recommendations:") print("---------------------------------------------------------") - + report_file = os.path.join(output_directory, 'autotune', 'autotune_recommendations.log') autotune_recommends_report = 'oref0-autotune-recommends-report {0}'.format(output_directory) - + call(autotune_recommends_report, shell=True) print("Recommendations Log File: {0}".format(report_file)) - + # Go ahead and echo autotune_recommendations.log to the terminal, minus blank lines # cat $report_file | egrep -v "\| *\| *$" call(['cat {0} | egrep -v "\| *\| *$"'.format(report_file)], shell=True) @@ -234,20 +225,20 @@ def create_summary_report_and_display_results(output_directory): logging.basicConfig(level=logging.DEBUG) # Supress non-essential logs (below WARNING) from requests module. logging.getLogger("requests").setLevel(logging.WARNING) - + args = get_input_arguments() - assign_args_to_variables(args) - + directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report = assign_args_to_variables(args) + # TODO: Convert Nightscout profile to OpenAPS profile format. 
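    # (Rough guess at the scope of that TODO: the Nightscout profile keeps time-based
    #  "basal", "sens", "carbratio", and "target_low"/"target_high" schedules, which
    #  would need to be rewritten into the pump-style settings files that
    #  get_openaps_profile() reads from the settings directory.)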
- #get_nightscout_profile(NIGHTSCOUT_HOST) - - get_openaps_profile(DIR) - get_nightscout_carb_and_insulin_treatments(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR) - get_nightscout_bg_entries(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR) - run_autotune(START_DATE, END_DATE, NUMBER_OF_RUNS, DIR) - - if EXPORT_EXCEL: - export_to_excel(DIR, EXPORT_EXCEL) - - if RECOMMENDS_REPORT: - create_summary_report_and_display_results(DIR) + #get_nightscout_profile(NIGHTSCOUT_HOST, DIR) + + get_openaps_profile(directory) + get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory) + get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory) + run_autotune(start_date, end_date, number_of_runs, directory) + + if export_excel: + export_to_excel(directory, export_excel) + + if recommends_report: + create_summary_report_and_display_results(directory) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..b51dedf39 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +requests==2.25.1 +six==1.15.0 +pytz==2021.1 From da70837952034a9f23794eda7751701434d34508 Mon Sep 17 00:00:00 2001 From: Robert Date: Sat, 18 Dec 2021 20:08:48 +0100 Subject: [PATCH 62/66] fix #1405 - rT.reason extended (#1415) --- lib/determine-basal/determine-basal.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index 79ec61208..fcda1352b 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -86,8 +86,8 @@ function enable_smb( console.error("SMB enabled for COB of",meal_data.mealCOB); } return true; - } - + } + // enable SMB/UAM (if enabled in preferences) for a full 6 hours after any carb entry // (6 hours is defined in carbWindow in lib/meal/total.js) if (profile.enableSMB_after_carbs === true && meal_data.carbs ) { @@ -98,7 +98,7 @@ function enable_smb( } return true; } - + // enable SMB/UAM (if enabled in preferences) if a low temptarget is set if (profile.enableSMB_with_temptarget === true && (profile.temptargetSet && target_bg < 100)) { if (meal_data.bwFound) { @@ -120,7 +120,7 @@ function enable_smb( } return true; } - + console.error("SMB disabled (no enableSMB preferences active or no condition satisfied)"); return false; } @@ -803,12 +803,12 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.COB=meal_data.mealCOB; rT.IOB=iob_data.iob; - rT.BGI=bgi; + rT.BGI=convert_bg(bgi,profile); rT.deviation=convert_bg(deviation, profile); rT.ISF=convert_bg(sens, profile); rT.CR=round(profile.carb_ratio, 2); rT.target_bg=convert_bg(target_bg, profile); - rT.reason="minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); + rT.reason="COB: " + rT.COB + ", Dev: " + rT.deviation + ", BGI: " + rT.BGI+ ", ISF: " + rT.ISF + ", CR: " + rT.CR + ", minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); if (lastCOBpredBG > 0) { rT.reason += ", COBpredBG " + convert_bg(lastCOBpredBG, profile); } From 912ab078fa2d1034d926e31e8e6f8dc3f3647b34 Mon Sep 17 00:00:00 2001 From: tzachi-dar Date: Mon, 20 Dec 2021 23:30:50 +0200 Subject: [PATCH 63/66] Tzachi shared node phase2 (#1370) * move oref0-calculate-iob to shared node. Signed-off-by: Tzachi Dar * move oref0-meal to shared node. 
Signed-off-by: Tzachi Dar * Add more files to strict mode. Signed-off-by: Tzachi Dar * move oref0-get-profile.js to shared node. Signed-off-by: Tzachi Dar * start using oref0-get-profile Signed-off-by: Tzachi Dar * Fix issues that have been found when running autosens.js in strict mode. Signed-off-by: Tzachi Dar * move oref0-get-ns-entries to shared node. Signed-off-by: Tzachi Dar * Fix creation of test data for oref0-get-profile. Signed-off-by: Tzachi Dar * Add printing to oref0-get-ns-entries and it's successors. Signed-off-by: Tzachi Dar * Add printing to oref0-meal (only very high level) Signed-off-by: Tzachi Dar * Fix some minor errors found in testing. Signed-off-by: Tzachi Dar * comment out echo dir_name from ns-loop.log * comment out echo dir_name in pump-loop.log Co-authored-by: Scott Leibrand --- bin/oref0-autosens-history.js | 13 ++-- bin/oref0-calculate-iob.js | 31 +++++++--- bin/oref0-detect-sensitivity.js | 14 ++--- bin/oref0-get-ns-entries.js | 67 ++++++++++++++------ bin/oref0-get-profile.js | 105 ++++++++++++++++++++------------ bin/oref0-meal.js | 72 ++++++++++++++-------- bin/oref0-normalize-temps.js | 1 + bin/oref0-ns-loop.sh | 23 +++++-- bin/oref0-pump-loop.sh | 29 +++++++-- bin/oref0-shared-node-utils.js | 50 +++++++++++++++ bin/oref0-shared-node.js | 89 ++++++++++++++++++++++++--- lib/autotune-prep/categorize.js | 6 +- lib/bolus.js | 1 + lib/determine-basal/autosens.js | 11 ++-- lib/determine-basal/cob.js | 10 ++- lib/iob/calculate.js | 2 + lib/iob/history.js | 2 + lib/iob/index.js | 3 +- lib/iob/total.js | 2 + lib/meal/history.js | 2 + lib/meal/index.js | 1 + lib/meal/total.js | 14 +++-- lib/medtronic-clock.js | 1 + lib/percentile.js | 1 + lib/profile/basal.js | 2 + lib/profile/carbs.js | 9 ++- lib/profile/index.js | 40 ++++++------ lib/profile/isf.js | 11 ++-- lib/profile/targets.js | 19 +++--- lib/pump.js | 1 + lib/temps.js | 1 + 31 files changed, 463 insertions(+), 170 deletions(-) create mode 100644 bin/oref0-shared-node-utils.js diff --git a/bin/oref0-autosens-history.js b/bin/oref0-autosens-history.js index a2df748e7..94d3d0c5c 100755 --- a/bin/oref0-autosens-history.js +++ b/bin/oref0-autosens-history.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Determine Basal @@ -16,10 +17,10 @@ */ var basal = require('../lib/profile/basal'); -var detect = require('../lib/determine-basal/autosens'); +var detectSensitivity = require('../lib/determine-basal/autosens'); if (!module.parent) { - var detectsensitivity = init(); + //var detectsensitivity = init(); // I don't see where this variable is used, so deleted it. 
var argv = require('yargs') .usage("$0 [outputfile.json]") @@ -135,16 +136,16 @@ if (!module.parent) { var ratioArray = []; do { detection_inputs.deviations = 96; - detect(detection_inputs); + var result = detectSensitivity(detection_inputs); for(i=0; i [autosens.json] [pumphistory-24h-zoned.json]'); } -if (!module.parent) { - var argv = require('yargs') + + +var oref0_calculate_iob = function oref0_calculate_iob(argv_params) { + var argv = require('yargs')(argv_params) .usage("$0 [] []") .strict(true) .help('help'); @@ -46,21 +49,21 @@ if (!module.parent) { var pumphistory_24_input = inputs[4]; var cwd = process.cwd(); - var pumphistory_data = require(cwd + '/' + pumphistory_input); - var profile_data = require(cwd + '/' + profile_input); - var clock_data = require(cwd + '/' + clock_input); + var pumphistory_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_input)); + var profile_data = JSON.parse(fs.readFileSync(cwd + '/' + profile_input)); + var clock_data = JSON.parse(fs.readFileSync(cwd + '/' + clock_input)); var autosens_data = null; if (autosens_input) { try { - autosens_data = require(cwd + '/' + autosens_input); + autosens_data = JSON.parse(fs.readFileSync(cwd + '/' + autosens_input)); } catch (e) {} //console.error(autosens_input, JSON.stringify(autosens_data)); } var pumphistory_24_data = null; if (pumphistory_24_input) { try { - pumphistory_24_data = require(cwd + '/' + pumphistory_24_input); + pumphistory_24_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_24_input)); } catch (e) {} } @@ -77,6 +80,16 @@ if (!module.parent) { } var iob = generate(inputs); - console.log(JSON.stringify(iob)); + return(JSON.stringify(iob)); +} + +if (!module.parent) { + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_calculate_iob(command) + console.log(result); } +exports = module.exports = oref0_calculate_iob \ No newline at end of file diff --git a/bin/oref0-detect-sensitivity.js b/bin/oref0-detect-sensitivity.js index f2a171ce4..d2ddfe710 100755 --- a/bin/oref0-detect-sensitivity.js +++ b/bin/oref0-detect-sensitivity.js @@ -14,7 +14,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ -var detect = require('../lib/determine-basal/autosens'); +var detectSensitivity = require('../lib/determine-basal/autosens'); if (!module.parent) { var argv = require('yargs') @@ -112,14 +112,14 @@ if (!module.parent) { }; console.error("Calculating sensitivity using 8h of non-exluded data"); detection_inputs.deviations = 96; - detect(detection_inputs); - var ratio8h = ratio; - var newisf8h = newisf; + var result = detectSensitivity(detection_inputs); + var ratio8h = result.ratio; + var newisf8h = result.newisf; console.error("Calculating sensitivity using all non-exluded data (up to 24h)"); detection_inputs.deviations = 288; - detect(detection_inputs); - var ratio24h = ratio; - var newisf24h = newisf; + result = detectSensitivity(detection_inputs); + var ratio24h = result.ratio; + var newisf24h = result.newisf; if ( ratio8h < ratio24h ) { console.error("Using 8h autosens ratio of",ratio8h,"(ISF",newisf8h+")"); } else { diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js index b0dcd36d9..6e855fec8 100755 --- a/bin/oref0-get-ns-entries.js +++ b/bin/oref0-get-ns-entries.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* oref0 Nightscout treatment fetch tool @@ -25,13 +26,16 @@ var request = require('request'); var _ = require('lodash'); var fs = require('fs'); var network = require('network'); +var shared_node = require('./oref0-shared-node-utils'); +var console_error = shared_node.console_error; +var console_log = shared_node.console_log; +var initFinalResults = shared_node.initFinalResults; -var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT']; -var log_errors = true; +var oref0_get_ns_engtires = function oref0_get_ns_engtires(argv_params, print_callback, final_result) { + var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT']; + var log_errors = true; -if (!module.parent) { - - var argv = require('yargs') + var argv = require('yargs')(argv_params) .usage("$0 ns-glucose.json NSURL API-SECRET ") .strict(true) .help('help'); @@ -45,7 +49,7 @@ if (!module.parent) { if ([null, '--help', '-h', 'help'].indexOf(glucose_input) > 0) { usage(); - process.exit(0); + process.exit(0); //??????? 
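        // (Note: this branch still calls process.exit() directly; under the shared-node model
        //  used by the other ported commands, exiting here would take down the whole node
        //  daemon, which is presumably what the "???????" marker is flagging.)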
} var nsurl = params._.slice(1, 2).pop(); @@ -88,21 +92,21 @@ if (!module.parent) { , headers: headers }; - if (log_errors) console.error('Connected to ' + ip +', testing for xDrip API availability'); + if (log_errors) console_error(final_result, 'Connecting to ' + ip +', testing for xDrip API availability'); request(options, function(error, res, data) { var failed = false; if (res && res.statusCode == 403) { - console.error("Load from xDrip failed: API_SECRET didn't match"); + console_error(final_result, "Load from xDrip failed: API_SECRET didn't match"); failed = true; } if (error) { if (safe_errors.includes(error.code)) { - if (log_errors) console.error('Load from local xDrip timed out, likely not connected to xDrip hotspot'); + if (log_errors) console_error(final_result, 'Load from local xDrip timed out, likely not connected to xDrip hotspot'); log_errors = false; } else { - if (log_errors) console.error("Load from xDrip failed", error); + if (log_errors) console_error(final_result, "Load from xDrip failed", error); log_errors = false; failed = true; } @@ -111,12 +115,18 @@ if (!module.parent) { } if (!failed && data) { - console.error("CGM results loaded from xDrip"); + console_error(final_result, "CGM results loaded from xDrip"); processAndOutput(data); return true; } - if (failed && callback) callback(); + if (failed && callback) { + // printing will happen in the callback + callback(); + } else { + print_callback(final_result); + } + }); return false; @@ -131,7 +141,7 @@ if (!module.parent) { fs.readFile(outputPath, 'utf8', function(err, fileContent) { if (err) { - console.error(err); + console_error(final_result, err); } else { try { glucosedata = JSON.parse(fileContent); @@ -147,10 +157,11 @@ if (!module.parent) { glucosedata = null; } } catch (e) { - console.error(e); + console_error(final_result, e); } } loadFromNightscoutWithDate(lastDate, glucosedata); + // callback will happen in loadFromNightscoutWithDate }); } @@ -181,18 +192,19 @@ if (!module.parent) { if (res && (res.statusCode == 200 || res.statusCode == 304)) { if (data) { - console.error("Got CGM results from Nightscout"); + console_error(final_result, "Got CGM results from Nightscout"); processAndOutput(data); } else { - console.error("Got Not Changed response from Nightscout, assuming no new data is available"); + console_error(final_result, "Got Not Changed response from Nightscout, assuming no new data is available"); // output old file if (!_.isNil(glucosedata)) { - console.log(JSON.stringify(glucosedata)); + console_log(final_result, JSON.stringify(glucosedata)); } } } else { - console.error("Loading CGM data from Nightscout failed", error); + console_error(final_result, "Loading CGM data from Nightscout failed", error); } + print_callback(final_result); }); } @@ -203,11 +215,28 @@ if (!module.parent) { sgvrecord.glucose = sgvrecord.sgv; }); - console.log(JSON.stringify(glucosedata)); + console_log(final_result, JSON.stringify(glucosedata)); } network.get_gateway_ip(function(err, ip) { loadFromxDrip(nsCallback, ip); }); +} +function print_callback(final_result) { + console.log(final_result.stdout); + console.error(final_result.err); } + + +if (!module.parent) { + var final_result = initFinalResults(); + + // remove the first parameter. 
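    // (process.argv[0] is the node binary and [1] is the script path, so the two shifts
    //  below leave only the actual CLI arguments.)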
+ var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_get_ns_engtires(command, print_callback, final_result) +} + +exports = module.exports = oref0_get_ns_engtires diff --git a/bin/oref0-get-profile.js b/bin/oref0-get-profile.js index 96a247fda..0f08ff1f9 100755 --- a/bin/oref0-get-profile.js +++ b/bin/oref0-get-profile.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Get Basal Information @@ -16,15 +17,21 @@ */ +var fs = require('fs'); var generate = require('../lib/profile/'); +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var process_exit = shared_node_utils.process_exit; +var initFinalResults = shared_node_utils.initFinalResults; -function exportDefaults () { - var defaults = generate.displayedDefaults(); - console.log(JSON.stringify(defaults, null, '\t')); +function exportDefaults (final_result) { + var defaults = generate.displayedDefaults(final_result); + console_log(final_result, JSON.stringify(defaults, null, '\t')); } -function updatePreferences (prefs) { - var defaults = generate.displayedDefaults(); +function updatePreferences (final_result, prefs) { + var defaults = generate.displayedDefaults(final_result); // check for any displayedDefaults missing from current prefs and add from defaults @@ -34,12 +41,11 @@ function updatePreferences (prefs) { } } - console.log(JSON.stringify(prefs, null, '\t')); + console_log(final_result, JSON.stringify(prefs, null, '\t')); } -if (!module.parent) { - - var argv = require('yargs') +var oref0_get_profile = function oref0_get_profile(final_result, argv_params) { + var argv = require('yargs')(argv_params) .usage("$0 [] [] [] [--model ] [--autotune ] [--exportDefaults] [--updatePreferences ]") .option('model', { alias: 'm', @@ -71,22 +77,23 @@ if (!module.parent) { if (!params.exportDefaults && !params.updatePreferences) { if (params._.length < 4 || params._.length > 7) { argv.showHelp(); - process.exit(1); + process_exit(final_result, 1); + return; } } var pumpsettings_input = params._[0]; if (params.exportDefaults) { - exportDefaults(); - process.exit(0); + exportDefaults(final_result); + return; } if (params.updatePreferences) { var preferences = {}; var cwd = process.cwd() - preferences = require(cwd + '/' + params.updatePreferences); - updatePreferences(preferences); - process.exit(0); + preferences = JSON.parse(fs.readFileSync(cwd + '/' + params.updatePreferences)); + updatePreferences(final_result, preferences); + return; } var bgtargets_input = params._[1] @@ -99,8 +106,8 @@ if (!module.parent) { var autotune_input = params.autotune; cwd = process.cwd() - var pumpsettings_data = require(cwd + '/' + pumpsettings_input); - var bgtargets_data = require(cwd + '/' + bgtargets_input); + var pumpsettings_data = JSON.parse(fs.readFileSync(cwd + '/' + pumpsettings_input)); + var bgtargets_data = JSON.parse(fs.readFileSync(cwd + '/' + bgtargets_input)); if (bgtargets_data.units !== 'mg/dL') { if (bgtargets_data.units === 'mmol/L') { for (var i = 0, len = bgtargets_data.targets.length; i < len; i++) { @@ -109,13 +116,14 @@ if (!module.parent) { } bgtargets_data.units = 'mg/dL'; } else { - console.log('BG Target data is expected to be expressed in mg/dL or mmol/L.' + console_log(final_result, 'BG Target data is expected to be expressed in mg/dL or mmol/L.' 
, 'Found', bgtargets_data.units, 'in', bgtargets_input, '.'); - process.exit(2); + process_exit(final_result, 2); + return; } } - var isf_data = require(cwd + '/' + isf_input); + var isf_data = JSON.parse(fs.readFileSync(cwd + '/' + isf_input)); if (isf_data.units !== 'mg/dL') { if (isf_data.units === 'mmol/L') { for (i = 0, len = isf_data.sensitivities.length; i < len; i++) { @@ -123,18 +131,18 @@ if (!module.parent) { } isf_data.units = 'mg/dL'; } else { - console.log('ISF is expected to be expressed in mg/dL or mmol/L.' + console_log(final_result, 'ISF is expected to be expressed in mg/dL or mmol/L.' , 'Found', isf_data.units, 'in', isf_input, '.'); - process.exit(2); + process_exit(final_result, 2); + return; } } - var basalprofile_data = require(cwd + '/' + basalprofile_input); + var basalprofile_data = JSON.parse(fs.readFileSync(cwd + '/' + basalprofile_input)); preferences = {}; if (typeof preferences_input !== 'undefined') { - preferences = require(cwd + '/' + preferences_input); + preferences = JSON.parse(fs.readFileSync(cwd + '/' + preferences_input)); } - var fs = require('fs'); var model_data = { } if (params.model) { @@ -143,9 +151,10 @@ if (!module.parent) { model_data = model_string.replace(/"/gi, ''); } catch (e) { var msg = { error: e, msg: "Could not parse model_data", file: model_input}; - console.error(msg.msg); - console.log(JSON.stringify(msg)); - process.exit(1); + console_error(final_result, msg.msg); + console_log(final_result, JSON.stringify(msg)); + process_exit(final_result, 1); + return; } } var autotune_data = { } @@ -155,7 +164,7 @@ if (!module.parent) { } catch (e) { msg = { error: e, msg: "Could not parse autotune_data", file: autotune_input}; - console.error(msg.msg); + console_error(final_result, msg.msg); // Continue and output a non-autotuned profile if we don't have autotune_data //console.log(JSON.stringify(msg)); //process.exit(1); @@ -170,9 +179,10 @@ if (!module.parent) { } catch (e) { msg = { error: e, msg: "Could not parse carbratio_data. Feature Meal Assist enabled but cannot find required carb_ratios.", file: carbratio_input }; - console.error(msg.msg); - console.log(JSON.stringify(msg)); - process.exit(1); + console_error(final_result, msg.msg); + console.log(final_result, JSON.stringify(msg)); + process_exit(final_result, 1); + return; } var errors = [ ]; @@ -186,10 +196,12 @@ if (!module.parent) { if (errors.length) { errors.forEach(function (msg) { - console.error(msg.msg); + console_error(final_result, msg.msg); }); - console.log(JSON.stringify(errors)); - process.exit(1); + console_log(final_result, JSON.stringify(errors)); + process_exit(final_result, 1); + + return; } } var temptargets_data = { }; @@ -197,7 +209,7 @@ if (!module.parent) { try { temptargets_data = JSON.parse(fs.readFileSync(temptargets_input, 'utf8')); } catch (e) { - console.error("Could not parse temptargets_data."); + console_error(final_result, "Could not parse temptargets_data."); } } @@ -229,8 +241,25 @@ if (!module.parent) { if (autotune_data.isfProfile) { inputs.isf = autotune_data.isfProfile; } if (autotune_data.carb_ratio) { inputs.carbratio.schedule[0].ratio = autotune_data.carb_ratio; } } - var profile = generate(inputs); + var profile = generate(final_result, inputs); + + console_log(final_result, JSON.stringify(profile)); + +} - console.log(JSON.stringify(profile)); +if (!module.parent) { + var final_result = initFinalResults(); + // remove the first parameter. 
+ var command = process.argv; + command.shift(); + command.shift(); + oref0_get_profile(final_result, command) + console.log(final_result.stdout); + if(final_result.err.length > 0) { + console.error(final_result.err); + } + process.exit(final_result.return_val); } + +exports = module.exports = oref0_get_profile; diff --git a/bin/oref0-meal.js b/bin/oref0-meal.js index 572a18baa..50ad3d1d0 100755 --- a/bin/oref0-meal.js +++ b/bin/oref0-meal.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* oref0 meal data tool @@ -20,9 +21,14 @@ */ var generate = require('../lib/meal'); - -if (!module.parent) { - var argv = require('yargs') +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var process_exit = shared_node_utils.process_exit; +var initFinalResults = shared_node_utils.initFinalResults; + +var oref0_meal = function oref0_meal(final_result, argv_params) { + var argv = require('yargs')(argv_params) .usage('$0 []') // error and show help if some other args given .strict(true) @@ -40,8 +46,9 @@ if (!module.parent) { if (inputs.length < 5 || inputs.length > 6) { argv.showHelp(); - console.log('{ "carbs": 0, "reason": "Insufficient arguments" }'); - process.exit(1); + console_log(final_result, '{ "carbs": 0, "reason": "Insufficient arguments" }'); + process_exit(1); + return; } var fs = require('fs'); @@ -53,41 +60,41 @@ if (!module.parent) { try { pumphistory_data = JSON.parse(fs.readFileSync(pumphistory_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }'); - return console.error("Could not parse pumphistory data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }'); //?? 
+ return console_error(final_result, "Could not parse pumphistory data: ", e); } try { profile_data = JSON.parse(fs.readFileSync(profile_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }'); - return console.error("Could not parse profile data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }'); + return console_error(final_result, "Could not parse profile data: ", e); } try { clock_data = JSON.parse(fs.readFileSync(clock_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }'); - return console.error("Could not parse clock data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }'); + return console_error(final_result, "Could not parse clock data: ", e); } try { basalprofile_data = JSON.parse(fs.readFileSync(basalprofile_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }'); - return console.error("Could not parse basalprofile data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }'); + return console_error(final_result, "Could not parse basalprofile data: ", e); } // disallow impossibly low carbRatios due to bad decoding if ( typeof(profile_data.carb_ratio) === 'undefined' || profile_data.carb_ratio < 3 ) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }'); - return console.error("Error: carb_ratio " + profile_data.carb_ratio + " out of bounds"); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }'); + return console_error(final_result, "Error: carb_ratio " + profile_data.carb_ratio + " out of bounds"); } try { var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+glucose_input); + console_error(final_result, "Warning: could not parse "+glucose_input); } var carb_data = { }; @@ -95,19 +102,19 @@ if (!module.parent) { try { carb_data = JSON.parse(fs.readFileSync(carb_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+carb_input); + console_error(final_result, "Warning: could not parse "+carb_input); } } if (typeof basalprofile_data[0] === 'undefined') { - return console.error("Error: bad basalprofile_data:" + basalprofile_data); + return console_error(final_result, "Error: bad basalprofile_data:" + basalprofile_data); } if (typeof basalprofile_data[0].glucose !== 'undefined') { - console.error("Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json"); - var temp = carb_data; - carb_data = glucose_data; - glucose_data = basalprofile_data; - basalprofile_data = temp; + console_error(final_result, "Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json"); + var temp = carb_data; + carb_data = glucose_data; + glucose_data = basalprofile_data; + basalprofile_data = temp; } inputs = { @@ -122,11 +129,26 @@ if (!module.parent) { var recentCarbs = generate(inputs); if (glucose_data.length < 36) { - console.error("Not enough glucose data to calculate carb absorption; found:", glucose_data.length); + 
console_error(final_result, "Not enough glucose data to calculate carb absorption; found:", glucose_data.length); recentCarbs.mealCOB = 0; recentCarbs.reason = "not enough glucose data to calculate carb absorption"; } - console.log(JSON.stringify(recentCarbs)); + console_log(final_result, recentCarbs); +} + +if (!module.parent) { + var final_result = initFinalResults(); + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + oref0_meal(final_result, command); + console.log(final_result.stdout); + if(final_result.err.length > 0) { + console.error(final_result.err); + } + process.exit(final_result.return_val); } +exports = module.exports = oref0_meal \ No newline at end of file diff --git a/bin/oref0-normalize-temps.js b/bin/oref0-normalize-temps.js index 91fa49a52..2acdb6f70 100755 --- a/bin/oref0-normalize-temps.js +++ b/bin/oref0-normalize-temps.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Released under MIT license. See the accompanying LICENSE.txt file for diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index a47bdfa5e..cf5b76c13 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -62,11 +62,11 @@ function get_ns_bg { || ! jq . cgm/ns-glucose-24h.json | grep -c glucose | jq -e '. > 36' >/dev/null; then #nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -24hours > cgm/ns-glucose-24h.json cp cgm/ns-glucose-24h.json cgm/ns-glucose-24h-temp.json - oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24 2>&1 >cgm/ns-glucose-24h.json + run_remote_command "oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24" 2>&1 >cgm/ns-glucose-24h.json fi #nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -1hour > cgm/ns-glucose-1h.json cp cgm/ns-glucose-1h.json cgm/ns-glucose-1h-temp.json - oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1 2>&1 >cgm/ns-glucose-1h.json + run_remote_command "oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1" 2>&1 >cgm/ns-glucose-1h.json jq -s '.[0] + .[1]|unique|sort_by(.date)|reverse' cgm/ns-glucose-24h.json cgm/ns-glucose-1h.json > cgm/ns-glucose.json glucose_fresh # update timestamp on cgm/ns-glucose.json @@ -116,7 +116,13 @@ function ns_temptargets { jq -s '.[0] + .[1]|unique|sort_by(.created_at)|reverse' settings/ns-temptargets.json settings/local-temptargets.json > settings/temptargets.json echo -n "Temptargets merged: " cat settings/temptargets.json | colorize_json '.[0] | { target: .targetBottom, duration: .duration, start: .created_at }' - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . 
> settings/profile.json.new || die "Couldn't refresh profile" + + dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-ns + #echo dir_name = $dir_name + mkdir -p $dir_name + cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || die "Couldn't refresh profile" if cat settings/profile.json.new | jq . | grep -q basal; then mv settings/profile.json.new settings/profile.json else @@ -124,12 +130,19 @@ function ns_temptargets { fi } -# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-merged.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0 +# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0 function ns_meal_carbs { #openaps report invoke monitor/carbhistory.json >/dev/null nightscout ns $NIGHTSCOUT_HOST $API_SECRET carb_history > monitor/carbhistory.json.new cat monitor/carbhistory.json.new | jq .[0].carbs | egrep -q [0-9] && mv monitor/carbhistory.json.new monitor/carbhistory.json - oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new + + dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + mkdir -p $dir_name + cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + + + run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new #grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json check_cp_meal || return 1 echo -n "Refreshed carbhistory; COB: " diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 8af151386..cd49d1fa9 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -597,7 +597,12 @@ function refresh_pumphistory_and_meal { try_return invoke_pumphistory_etc || return 1 try_return invoke_reservoir_etc || return 1 echo -n "meal.json " - if ! 
retry_return oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new ; then + + dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + mkdir -p $dir_name + cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + if ! retry_return run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new ; then echo; echo "Couldn't calculate COB" return 1 fi @@ -624,7 +629,12 @@ function check_cp_meal { } function calculate_iob { - oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } + dir_name=~/test_data/oref0-calculate-iob$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + mkdir -p $dir_name + cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name + + run_remote_command 'oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json' > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } [ -s monitor/iob.json.new ] && jq -e .[0].iob monitor/iob.json.new >&3 && cp monitor/iob.json.new monitor/iob.json || { echo; echo "Couldn't copy IOB"; fail "$@"; } } @@ -674,7 +684,13 @@ function get_settings { fi # generate settings/pumpprofile.json without autotune - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } + + dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump + #echo dir_name = $dir_name + mkdir -p $dir_name + cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json' 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } if [ -s settings/pumpprofile.json.new ] && jq -e .current_basal settings/pumpprofile.json.new >&4; then mv settings/pumpprofile.json.new settings/pumpprofile.json echo -n "Pump profile refreshed; " @@ -683,7 +699,12 @@ function get_settings { ls -lart settings/pumpprofile.json.new fi # generate settings/profile.json.new with autotune - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . 
> settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } + dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump-auto + #echo dir_name = $dir_name + mkdir -p $dir_name + cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } if [ -s settings/profile.json.new ] && jq -e .current_basal settings/profile.json.new >&4; then mv settings/profile.json.new settings/profile.json echo -n "Settings refreshed; " diff --git a/bin/oref0-shared-node-utils.js b/bin/oref0-shared-node-utils.js new file mode 100644 index 000000000..e73846552 --- /dev/null +++ b/bin/oref0-shared-node-utils.js @@ -0,0 +1,50 @@ +'use strict'; + +function console_both(final_result, theArgs) { + if(final_result.length > 0) { + final_result += '\n'; + } + var len = theArgs.length; + for (var i = 0 ; i < len; i++) { + if (typeof theArgs[i] != 'object') { + final_result += theArgs[i]; + } else { + final_result += JSON.stringify(theArgs[i]); + } + if(i != len -1 ) { + final_result += ' '; + } + + } + return final_result; +} + +var console_error = function console_error(final_result, ...theArgs) { + final_result.err = console_both(final_result.err, theArgs); +} + +var console_log = function console_log(final_result, ...theArgs) { + final_result.stdout = console_both(final_result.stdout, theArgs); +} + +var process_exit = function process_exit(final_result, ret) { + final_result.return_val = ret; +} + +var initFinalResults = function initFinalResults() { + var final_result = { + stdout: '' + , err: '' + , return_val : 0 + }; + return final_result; +} + + + +module.exports = { + console_log : console_log, + console_error : console_error, + process_exit : process_exit, + initFinalResults : initFinalResults +} \ No newline at end of file diff --git a/bin/oref0-shared-node.js b/bin/oref0-shared-node.js index 24252ba6b..2110cc737 100644 --- a/bin/oref0-shared-node.js +++ b/bin/oref0-shared-node.js @@ -5,8 +5,16 @@ var os = require("os"); var ns_status = require("./ns-status"); var oref0_normalize_temps = require("./oref0-normalize-temps"); +var oref0_calculate_iob = require("./oref0-calculate-iob"); +var oref0_meal = require("./oref0-meal"); +var oref0_get_profile = require("./oref0-get-profile"); +var oref0_get_ns_entries = require("./oref0-get-ns-entries"); var fs = require('fs'); var requireUtils = require('../lib/require-utils'); +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var initFinalResults = shared_node_utils.initFinalResults; function createRetVal(stdout, return_val) { var returnObj = { @@ -76,10 +84,11 @@ function serverListen() { command = command.map(s => s.trim()); - var result = 'unknown command'; - var return_val = 0; + var result = 'unknown command\n'; console.log('command = ', command); + var async_command = false; + var final_result = initFinalResults(); if (command[0] == 'ns-status') { // remove the first parameter. 
@@ -87,37 +96,99 @@ function serverListen() { try { result = ns_status(command); result = addNewlToResult(result); + final_result = createRetVal(result, 0); } catch (err) { - return_val = 1; + final_result.return_val = 1; console.log('exception when parsing ns_status ', err); + console_error(final_result, 'exception when parsing ns_status ', err); } } else if (command[0] == 'oref0-normalize-temps') { command.shift(); try { result = oref0_normalize_temps(command); result = addNewlToResult(result); + final_result = createRetVal(result, 0); } catch (err) { - return_val = 1; + final_result.return_val = 1; console.log('exception when parsing oref0-normalize-temps ', err); } + } else if (command[0] == 'oref0-calculate-iob') { + command.shift(); + try { + result = oref0_calculate_iob(command); + result = addNewlToResult(result); + final_result = createRetVal(result, 0); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-calculate-iob ', err); + } + } else if (command[0] == 'oref0-meal') { + command.shift(); + try { + result = oref0_meal(final_result, command); + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ???????????? + final_result.err = addNewlToResult(final_result.err); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-meal ', err); + } + } else if (command[0] == 'oref0-get-profile') { + command.shift(); + try { + oref0_get_profile(final_result, command); + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ???????????? + final_result.err = addNewlToResult(final_result.err); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-get-profile ', err); + } + } else if (command[0] == 'oref0-get-ns-entries') { + async_command = true; + + var final_result = initFinalResults(); + function print_callback(final_result) { + try { + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ???????????? + final_result.err = addNewlToResult(final_result.err); + s.write(JSON.stringify(final_result)); + s.end(); + } catch (err) { + // I assume here that error happens when handling the socket, so not trying to close it + console.log('exception in print_callback ', err); + } + } + command.shift(); + try { + result = oref0_get_ns_entries(command, print_callback, final_result); + result = addNewlToResult(result); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-get-ns-entries ', err); + } } else if (command[0] == 'ping') { result = 'pong'; + final_result = createRetVal(result, 0); } else if (command[0] == 'json') { // remove the first parameter. command.shift(); try { + var return_val; [result, return_val] = jsonWrapper(command); result = addNewlToResult(result); + final_result = createRetVal(result, return_val); } catch (err) { - return_val = 1; + final_result.return_val = 1; console.log('exception when running json_wrarpper ', err); } } else { console.error('Unknown command = ', command); - return_val = 1; + console_error(final_result, 'Unknown command = ', command); + final_result.return_val = 1; + } + if(!async_command) { + s.write(JSON.stringify(final_result)); + s.end(); } - s.write(JSON.stringify(createRetVal(result, return_val))); - s.end(); }); }); } @@ -145,7 +216,7 @@ function addNewlToResult(result) { if (result === undefined) { // This preserves the oref0_normalize_temps behavior.
result = "" - } else { + } else if (result.length != 0) { result += "\n"; } return result; diff --git a/lib/autotune-prep/categorize.js b/lib/autotune-prep/categorize.js index bd824e876..5ebb8fde5 100644 --- a/lib/autotune-prep/categorize.js +++ b/lib/autotune-prep/categorize.js @@ -1,3 +1,5 @@ +'use strict'; + var tz = require('moment-timezone'); var basal = require('../profile/basal'); var getIOB = require('../iob'); @@ -122,6 +124,7 @@ function categorizeBGDatums(opts) { var type=""; // main for loop var fullHistory = IOBInputs.history; + var lastIsfResult = null; for (i=bucketedData.length-5; i > 0; --i) { glucoseDatum = bucketedData[i]; //console.error(glucoseDatum); @@ -165,7 +168,8 @@ function categorizeBGDatums(opts) { glucoseDatum.avgDelta = avgDelta; //sens = ISF - var sens = ISF.isfLookup(IOBInputs.profile.isfProfile,BGDate); + var sens; + [sens, lastIsfResult] = ISF.isfLookup(IOBInputs.profile.isfProfile, BGDate, lastIsfResult); IOBInputs.clock=BGDate.toISOString(); // trim down IOBInputs.history to just the data for 6h prior to BGDate //console.error(IOBInputs.history[0].created_at); diff --git a/lib/bolus.js b/lib/bolus.js index c7dc0f61d..b46d3fc5c 100644 --- a/lib/bolus.js +++ b/lib/bolus.js @@ -1,3 +1,4 @@ +'use strict'; function reduce (treatments) { diff --git a/lib/determine-basal/autosens.js b/lib/determine-basal/autosens.js index db2c86834..6a8b89a41 100644 --- a/lib/determine-basal/autosens.js +++ b/lib/determine-basal/autosens.js @@ -1,3 +1,5 @@ +'use strict'; + var basal = require('../profile/basal'); var get_iob = require('../iob'); var find_insulin = require('../iob/history'); @@ -143,11 +145,12 @@ function detectSensitivity(inputs) { var mealCarbs = 0; var mealStartCounter = 999; var type=""; + var lastIsfResult = null; //console.error(bucketed_data); for (i=3; i < bucketed_data.length; ++i) { bgTime = new Date(bucketed_data[i].date); - - var sens = isf.isfLookup(profile.isfProfile,bgTime); + var sens; + [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult); //console.error(bgTime , bucketed_data[i].glucose); var bg; @@ -398,7 +401,7 @@ function detectSensitivity(inputs) { } else { console.error("Sensitivity normal."); } - ratio = 1 + (basalOff / profile.max_daily_basal); + var ratio = 1 + (basalOff / profile.max_daily_basal); //console.error(basalOff, profile.max_daily_basal, ratio); // don't adjust more than 1.2x by default (set in preferences.json) @@ -411,7 +414,7 @@ function detectSensitivity(inputs) { } ratio = Math.round(ratio*100)/100; - newisf = Math.round(profile.sens / ratio); + var newisf = Math.round(profile.sens / ratio); //console.error(profile, newisf, ratio); console.error("ISF adjusted from "+profile.sens+" to "+newisf); //console.error("Basal adjustment "+basalOff.toFixed(2)+"U/hr"); diff --git a/lib/determine-basal/cob.js b/lib/determine-basal/cob.js index 85f0522b9..903409ca6 100644 --- a/lib/determine-basal/cob.js +++ b/lib/determine-basal/cob.js @@ -1,3 +1,5 @@ +'use strict'; + var basal = require('../profile/basal'); var get_iob = require('../iob'); var find_insulin = require('../iob/history'); @@ -12,7 +14,9 @@ function detectCarbAbsorption(inputs) { }); var iob_inputs = inputs.iob_inputs; var basalprofile = inputs.basalprofile; - /* TODO why does declaring profile break tests-command-behavior.tests.sh? */ profile = inputs.iob_inputs.profile; + /* TODO why does declaring profile break tests-command-behavior.tests.sh? 
+ because it is a global variable used in other places.*/ + var profile = inputs.iob_inputs.profile; var mealTime = new Date(inputs.mealTime); var ciTime = new Date(inputs.ciTime); @@ -114,10 +118,12 @@ function detectCarbAbsorption(inputs) { var minDeviation = 999; var allDeviations = []; //console.error(bucketed_data); + var lastIsfResult = null; for (i=0; i < bucketed_data.length-3; ++i) { bgTime = new Date(bucketed_data[i].date); - var sens = isf.isfLookup(profile.isfProfile,bgTime); + var sens; + [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult); //console.error(bgTime , bucketed_data[i].glucose, bucketed_data[i].date); var bg; diff --git a/lib/iob/calculate.js b/lib/iob/calculate.js index ba808692f..904e953f4 100644 --- a/lib/iob/calculate.js +++ b/lib/iob/calculate.js @@ -1,3 +1,5 @@ +'use strict'; + function iobCalc(treatment, time, curve, dia, peak, profile) { // iobCalc returns two variables: // activityContrib = units of treatment.insulin used in previous minute diff --git a/lib/iob/history.js b/lib/iob/history.js index 1a18e27df..5c7ffe67a 100644 --- a/lib/iob/history.js +++ b/lib/iob/history.js @@ -1,3 +1,4 @@ +'use strict'; var tz = require('moment-timezone'); var basalprofile = require('../profile/basal.js'); @@ -514,6 +515,7 @@ function calcTempTreatments (inputs, zeroTempDuration) { var currentItem = splitHistory[i]; if (currentItem.duration > 0) { + var target_bg; var currentRate = profile_data.current_basal; if (!_.isEmpty(profile_data.basalprofile)) { diff --git a/lib/iob/index.js b/lib/iob/index.js index 678bbca04..fd64e3473 100644 --- a/lib/iob/index.js +++ b/lib/iob/index.js @@ -1,3 +1,4 @@ +'use strict'; var tz = require('moment-timezone'); var find_insulin = require('./history'); @@ -65,7 +66,7 @@ function generate (inputs, currentIOBOnly, treatments) { iStop=4*60; } for (var i=0; i 150) { - console.error("Error: carbRatio of " + carbRatio + " out of bounds."); + console_error(final_result, "Error: carbRatio of " + carbRatio + " out of bounds."); return; } break; @@ -26,7 +29,7 @@ function carbRatioLookup (inputs, profile) { } return carbRatio.ratio; } else { - console.error("Error: Unsupported carb_ratio units " + carbratio_data.units); + console_error(final_result, "Error: Unsupported carb_ratio units " + carbratio_data.units); return; } //return carbRatio.ratio; diff --git a/lib/profile/index.js b/lib/profile/index.js index f4bdae9f7..8ede7f428 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -1,3 +1,4 @@ +'use strict'; var basal = require('./basal'); var targets = require('./targets'); @@ -5,6 +6,10 @@ var isf = require('./isf'); var carb_ratios = require('./carbs'); var _ = require('lodash'); +var shared_node_utils = require('../../bin/oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; + function defaults ( ) { return /* profile */ { max_iob: 0 // if max_iob is not provided, will default to zero @@ -73,7 +78,7 @@ function defaults ( ) { } } -function displayedDefaults () { +function displayedDefaults (final_result) { var allDefaults = defaults(); var profile = { }; @@ -94,11 +99,11 @@ function displayedDefaults () { profile.edison_battery_shutdown_voltage = allDefaults.edison_battery_shutdown_voltage; profile.pi_battery_shutdown_percent = allDefaults.pi_battery_shutdown_percent; - console.error(profile); + console_error(final_result, profile); return profile } -function generate (inputs, opts) { +function generate (final_result, inputs, 
opts) { var profile = opts && opts.type ? opts : defaults( ); // check if inputs has overrides for any of the default prefs @@ -113,8 +118,8 @@ function generate (inputs, opts) { if (inputs.settings.insulin_action_curve > 1) { profile.dia = pumpsettings_data.insulin_action_curve; } else { - console.error('DIA of', profile.dia, 'is not supported'); - return -1; + console_error(final_result, 'DIA of', profile.dia, 'is not supported'); + return -1; } if (inputs.model) { @@ -132,19 +137,19 @@ function generate (inputs, opts) { profile.max_daily_basal = basal.maxDailyBasal(inputs); profile.max_basal = basal.maxBasalLookup(inputs); if (profile.current_basal === 0) { - console.error("current_basal of",profile.current_basal,"is not supported"); - return -1; + console_error(final_result, "current_basal of",profile.current_basal,"is not supported"); + return -1; } if (profile.max_daily_basal === 0) { - console.error("max_daily_basal of",profile.max_daily_basal,"is not supported"); - return -1; + console_error(final_result, "max_daily_basal of",profile.max_daily_basal,"is not supported"); + return -1; } if (profile.max_basal < 0.1) { - console.error("max_basal of",profile.max_basal,"is not supported"); - return -1; + console_error(final_result, "max_basal of",profile.max_basal,"is not supported"); + return -1; } - var range = targets.bgTargetsLookup(inputs, profile); + var range = targets.bgTargetsLookup(final_result, inputs, profile); profile.out_units = inputs.targets.user_preferred_units; profile.min_bg = Math.round(range.min_bg); profile.max_bg = Math.round(range.max_bg); @@ -160,17 +165,18 @@ function generate (inputs, opts) { delete profile.bg_targets.raw; profile.temptargetSet = range.temptargetSet; - profile.sens = isf.isfLookup(inputs.isf); + var lastResult = null; + [profile.sens, lastResult] = isf.isfLookup(inputs.isf, undefined, lastResult); profile.isfProfile = inputs.isf; if (profile.sens < 5) { - console.error("ISF of",profile.sens,"is not supported"); - return -1; + console_error(final_result, "ISF of",profile.sens,"is not supported"); + return -1; } if (typeof(inputs.carbratio) !== "undefined") { - profile.carb_ratio = carb_ratios.carbRatioLookup(inputs, profile); + profile.carb_ratio = carb_ratios.carbRatioLookup(final_result, inputs, profile); profile.carb_ratios = inputs.carbratio; } else { - console.error("Profile wasn't given carb ratio data, cannot calculate carb_ratio"); + console_error(final_result, "Profile wasn't given carb ratio data, cannot calculate carb_ratio"); } return profile; } diff --git a/lib/profile/isf.js b/lib/profile/isf.js index ca9bcea05..27cdca6e7 100644 --- a/lib/profile/isf.js +++ b/lib/profile/isf.js @@ -1,9 +1,8 @@ +'use strict'; var _ = require('lodash'); -var lastResult = null; - -function isfLookup(isf_data, timestamp) { +function isfLookup(isf_data, timestamp, lastResult) { var nowDate = timestamp; @@ -14,7 +13,7 @@ function isfLookup(isf_data, timestamp) { var nowMinutes = nowDate.getHours() * 60 + nowDate.getMinutes(); if (lastResult && nowMinutes >= lastResult.offset && nowMinutes < lastResult.endOffset) { - return lastResult.sensitivity; + return [lastResult.sensitivity, lastResult]; } isf_data = _.sortBy(isf_data.sensitivities, function(o) { return o.offset; }); @@ -22,7 +21,7 @@ function isfLookup(isf_data, timestamp) { var isfSchedule = isf_data[isf_data.length - 1]; if (isf_data[0].offset !== 0) { - return -1; + return [-1, lastResult]; } var endMinutes = 1440; @@ -40,7 +39,7 @@ function isfLookup(isf_data, timestamp) { lastResult = 
isfSchedule; lastResult.endOffset = endMinutes; - return isfSchedule.sensitivity; + return [isfSchedule.sensitivity, lastResult]; } isfLookup.isfLookup = isfLookup; diff --git a/lib/profile/targets.js b/lib/profile/targets.js index 741b83151..31a140a91 100644 --- a/lib/profile/targets.js +++ b/lib/profile/targets.js @@ -1,11 +1,14 @@ +'use strict'; var getTime = require('../medtronic-clock'); +var shared_node_utils = require('../../bin/oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; -function bgTargetsLookup (inputs, profile) { - return bound_target_range(lookup(inputs, profile)); +function bgTargetsLookup (final_result, inputs, profile) { + return bound_target_range(lookup(final_result, inputs, profile)); } -function lookup (inputs, profile) { +function lookup (final_result, inputs, profile) { var bgtargets_data = inputs.targets; var temptargets_data = inputs.temptargets; var now = new Date(); @@ -33,7 +36,7 @@ function lookup (inputs, profile) { try { temptargets_data.sort(function (a, b) { return new Date(b.created_at) - new Date(a.created_at) }); } catch (e) { - console.error("No temptargets found."); + console_error(final_result, "No temptargets found."); } //console.error(temptargets_data); //console.error(now); @@ -48,7 +51,7 @@ function lookup (inputs, profile) { tempTargets = bgTargets; break; } else if (! temptargets_data[i].targetBottom || ! temptargets_data[i].targetTop) { - console.error("eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop); + console_error(final_result, "eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop); break; } else if (now >= start && now < expires ) { //console.error(temptargets_data[i]); @@ -77,8 +80,8 @@ function bound_target_range (target) { return target } -bgTargetsLookup.bgTargetsLookup = bgTargetsLookup; -bgTargetsLookup.lookup = lookup; -bgTargetsLookup.bound_target_range = bound_target_range; +bgTargetsLookup.bgTargetsLookup = bgTargetsLookup; // does use log +bgTargetsLookup.lookup = lookup; // not used outside +bgTargetsLookup.bound_target_range = bound_target_range; // does not log exports = module.exports = bgTargetsLookup; diff --git a/lib/pump.js b/lib/pump.js index b2f892a68..838fea172 100644 --- a/lib/pump.js +++ b/lib/pump.js @@ -1,3 +1,4 @@ +'use strict'; function translate (treatments) { diff --git a/lib/temps.js b/lib/temps.js index 90abd1bf0..9ac6918cc 100644 --- a/lib/temps.js +++ b/lib/temps.js @@ -1,3 +1,4 @@ +'use strict'; function filter (treatments) { From 5d0984e5368ae43b0e4ae0ec1e8f32d6b4d5050d Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 21 Dec 2021 08:30:41 -0800 Subject: [PATCH 64/66] comment out shared-node cp to avoid full disk (#1416) --- bin/oref0-ns-loop.sh | 4 ++-- bin/oref0-pump-loop.sh | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index cf5b76c13..100b2d844 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -120,7 +120,7 @@ function ns_temptargets { dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-ns #echo dir_name = $dir_name mkdir -p $dir_name - cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + #cp settings/settings.json settings/bg_targets.json 
settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || die "Couldn't refresh profile" if cat settings/profile.json.new | jq . | grep -q basal; then @@ -139,7 +139,7 @@ function ns_meal_carbs { dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name mkdir -p $dir_name - cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index cd49d1fa9..816fed2d0 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -601,7 +601,7 @@ function refresh_pumphistory_and_meal { dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name mkdir -p $dir_name - cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name if ! 
retry_return run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new ; then echo; echo "Couldn't calculate COB" return 1 @@ -632,7 +632,7 @@ function calculate_iob { dir_name=~/test_data/oref0-calculate-iob$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name mkdir -p $dir_name - cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name run_remote_command 'oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json' > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } [ -s monitor/iob.json.new ] && jq -e .[0].iob monitor/iob.json.new >&3 && cp monitor/iob.json.new monitor/iob.json || { echo; echo "Couldn't copy IOB"; fail "$@"; } @@ -685,10 +685,10 @@ function get_settings { # generate settings/pumpprofile.json without autotune - dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump + #dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump #echo dir_name = $dir_name mkdir -p $dir_name - cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name + #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json' 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } if [ -s settings/pumpprofile.json.new ] && jq -e .current_basal settings/pumpprofile.json.new >&4; then @@ -702,7 +702,7 @@ function get_settings { dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump-auto #echo dir_name = $dir_name mkdir -p $dir_name - cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . 
> settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } if [ -s settings/profile.json.new ] && jq -e .current_basal settings/profile.json.new >&4; then From 5e4ee98c920f8f283b101ea17937728c3206d367 Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Tue, 4 Jan 2022 17:31:42 -0800 Subject: [PATCH 65/66] define undefined var dev5m (#1418) * comment out shared-node cp to avoid full disk * #1417: define undefined var dev5m --- lib/autotune-prep/categorize.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/autotune-prep/categorize.js b/lib/autotune-prep/categorize.js index 5ebb8fde5..4166bb5d0 100644 --- a/lib/autotune-prep/categorize.js +++ b/lib/autotune-prep/categorize.js @@ -221,7 +221,7 @@ function categorizeBGDatums(opts) { glucoseDatum.BGI = BGI; // calculating deviation var deviation = avgDelta-BGI; - dev5m = delta-BGI; + var dev5m = delta-BGI; //console.error(deviation,avgDelta,BG,bucketedData[i].glucose); // set positive deviations to zero if BG is below 80 From da7015cf32943a85d0d2152bebf5bcf9b6fa2d1e Mon Sep 17 00:00:00 2001 From: Scott Leibrand Date: Mon, 13 Jun 2022 17:52:33 -0700 Subject: [PATCH 66/66] carbsReq pushover improvements (#1421) * comment out shared-node cp to avoid full disk * don't send carbsReq alerts if COB is zero due to clock data parsing error * run pushover in pump-loop * run pushover in pump-loop * check pushover_snooze immediately before sending a pushover * pathfix * run pushover in pump-loop instead of cron-every-minute * don't background pushover for now * uncomment debug line * debugging * debugging * debugging * -e to make jq return non-zero if no snooze found * apparently this line never worked * comment out debug line * no newline * don't print date * remove extra logging * fix check for empty MAKER_KEY * only print snooze message if there's something to snooze * don't print contents of file * update Pushover Glances to include temp basal rate*duration * direction arrows * U/h * use NS to coordinate glances snoozes between rigs * more compact carbsreq string * make glanceDelay configurable * better check of whether pushoverGlances is a number * fix glanceDelay=pushoverGlances if numeric check * wait_for_silence before refreshing profile and pumphistory * fix glanceDelay=pushoverGlances if numeric check * fix carbhistory egrep syntax * ls profile.json.new if invalid * Invalid profile.json.new debugging * Invalid profile.json.new debugging * Invalid profile.json.new debugging * NS uses UTC now * grep -q exits early w/ 141 in some race conditions: use grep >/dev/null instead * remove debug lines * Revert "NS uses UTC now" This reverts commit e9f881000e6169a65a1a3640833d4dd9b7c8bb90. 
* convert NS UTC date into local time * don't print * comment out debug lines * don't print snooze.json, just the fact we uploaded one * check that glance api returns status: 1 * don't print snooze.json * just print the fact we sent a carbsReq pushover * don't print meal.json (twice) * less verbose logging after wait_for_silence * don't print * check that pushover api returns status: 1 * spacing * just print COB after refreshing meal.json * less verbose logging * print entirer meal.json, colorized * remove duplicate cat of meal.json * add delta to glance title * print last_glance time inline * display tick instead of delta * tick before arrow * remove quotes * retry w/o new BG after failure to parse clock data * don't use grep -q to avoid return code 141 race condition * don't use grep -q to avoid return code 141 race condition * don't use grep -q to avoid return code 141 race condition * don't mkdir: directory entries eventually fill up the disk --- bin/oref0-autosens-loop.sh | 8 +-- bin/oref0-bash-common-functions.sh | 2 +- bin/oref0-cron-every-minute.sh | 2 +- bin/oref0-ns-loop.sh | 22 +++--- bin/oref0-pump-loop.sh | 24 ++++--- bin/oref0-pushover.sh | 99 ++++++++++++++++++-------- lib/determine-basal/determine-basal.js | 4 +- 7 files changed, 104 insertions(+), 57 deletions(-) diff --git a/bin/oref0-autosens-loop.sh b/bin/oref0-autosens-loop.sh index a811d68e3..44f001777 100755 --- a/bin/oref0-autosens-loop.sh +++ b/bin/oref0-autosens-loop.sh @@ -33,11 +33,11 @@ function completed_recently { # openaps use detect-sensitivity shell monitor/glucose.json settings/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json function autosens { # only run autosens if pumphistory-24h is newer than autosens - if find monitor/ -newer settings/autosens.json | grep -q pumphistory-24h-zoned.json \ - || find settings/ -size -5c | grep -q autosens.json \ - || ! find settings/ | grep -q autosens \ + if find monitor/ -newer settings/autosens.json | grep pumphistory-24h-zoned.json \ + || find settings/ -size -5c | grep autosens.json \ + || ! find settings/ | grep autosens \ || ! find settings/autosens.json >/dev/null; then - if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep -q [0-9]; then + if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep "[0-9]"; then mv settings/autosens.json.new settings/autosens.json echo -n "Autosens refreshed: " else diff --git a/bin/oref0-bash-common-functions.sh b/bin/oref0-bash-common-functions.sh index f4262d43f..9a8d5b3e7 100755 --- a/bin/oref0-bash-common-functions.sh +++ b/bin/oref0-bash-common-functions.sh @@ -542,7 +542,7 @@ function wait_for_silence { echo -n . # returns true if it hears pump comms, false otherwise if ! listen -t $waitfor's' 2>&4 ; then - echo "No interfering pump comms detected from other rigs (this is a good thing!)" + echo " All clear." 
echo -n "Continuing oref0-pump-loop at "; date return 0 else diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh index 2874b2be4..f84f79e52 100755 --- a/bin/oref0-cron-every-minute.sh +++ b/bin/oref0-cron-every-minute.sh @@ -129,7 +129,7 @@ if [[ ! -z "$BT_PEB" || ! -z "$BT_MAC" ]]; then fi if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then - oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log & + #oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log & fi # if disk has less than 10MB free, delete something and logrotate diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index 100b2d844..34e929e92 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -25,7 +25,7 @@ main() { fi fi - pushover_snooze + #pushover_snooze ns_temptargets || die "ns_temptargets failed" ns_meal_carbs || echo "ns_meal_carbs failed" battery_status @@ -73,7 +73,7 @@ function get_ns_bg { # if ns-glucose.json data is <10m old, no more than 5m in the future, and valid (>38), # copy cgm/ns-glucose.json over to cgm/glucose.json if it's newer valid_glucose=$(find_valid_ns_glucose) - if echo $valid_glucose | grep -q glucose; then + if echo $valid_glucose | grep glucose >/dev/null; then echo Found recent valid BG: echo $valid_glucose | colorize_json '.[0] | { glucose: .glucose, dateString: .dateString }' cp -pu cgm/ns-glucose.json cgm/glucose.json @@ -104,7 +104,7 @@ function find_valid_ns_glucose { function ns_temptargets { #openaps report invoke settings/temptargets.json settings/profile.json >/dev/null nightscout ns $NIGHTSCOUT_HOST $API_SECRET temp_targets > settings/ns-temptargets.json.new - cat settings/ns-temptargets.json.new | jq .[0].duration | egrep -q [0-9] && mv settings/ns-temptargets.json.new settings/ns-temptargets.json + cat settings/ns-temptargets.json.new | jq .[0].duration | egrep "[0-9]" >/dev/null && mv settings/ns-temptargets.json.new settings/ns-temptargets.json # TODO: merge local-temptargets.json with ns-temptargets.json #openaps report invoke settings/ns-temptargets.json settings/profile.json echo -n "Latest NS temptargets: " @@ -119,11 +119,11 @@ function ns_temptargets { dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-ns #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || die "Couldn't refresh profile" - if cat settings/profile.json.new | jq . | grep -q basal; then + if cat settings/profile.json.new | jq . 
| grep basal > /dev/null; then mv settings/profile.json.new settings/profile.json else die "Invalid profile.json.new after refresh" @@ -134,11 +134,11 @@ function ns_temptargets { function ns_meal_carbs { #openaps report invoke monitor/carbhistory.json >/dev/null nightscout ns $NIGHTSCOUT_HOST $API_SECRET carb_history > monitor/carbhistory.json.new - cat monitor/carbhistory.json.new | jq .[0].carbs | egrep -q [0-9] && mv monitor/carbhistory.json.new monitor/carbhistory.json + cat monitor/carbhistory.json.new | jq .[0].carbs | egrep "[0-9]" >/dev/null && mv monitor/carbhistory.json.new monitor/carbhistory.json dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name @@ -191,7 +191,7 @@ function upload { function upload_ns_status { set -o pipefail #echo Uploading devicestatus - grep -q iob monitor/iob.json || die "IOB not found" + grep iob monitor/iob.json >/dev/null || die "IOB not found" # set the timestamp on enact/suggested.json to match the deliverAt time touch -d $(cat enact/suggested.json | jq .deliverAt | sed 's/"//g') enact/suggested.json if ! file_is_recent_and_min_size enact/suggested.json 10; then @@ -200,12 +200,12 @@ function upload_ns_status { return 1 fi ns_status_file_name=ns-status$(date +"%Y-%m-%d-%T").json - format_ns_status $ns_status_file_name && grep -q iob upload/$ns_status_file_name || die "Couldn't generate ns-status.json" + format_ns_status $ns_status_file_name && grep iob upload/$ns_status_file_name >/dev/null || die "Couldn't generate ns-status.json" # Delete files older than 24 hours. find upload -maxdepth 1 -mmin +1440 -type f -name "ns-status*.json" -delete # Upload the files one by one according to their order. ls upload/ns-status*.json | while read -r file_name ; do - if ! grep -q iob $file_name ; then + if ! grep iob $file_name >/dev/null ; then #echo deleteing file $file_name rm $file_name continue @@ -239,7 +239,7 @@ function upload_recent_treatments { } function latest_ns_treatment_time { - nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at + date -Is -d $(nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at) } #nightscout cull-latest-openaps-treatments monitor/pumphistory-zoned.json settings/model.json $(openaps latest-ns-treatment-time) > upload/latest-treatments.json diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index 816fed2d0..3ff08fa50 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -68,9 +68,17 @@ main() { fi fi touch /tmp/pump_loop_completed -r /tmp/pump_loop_enacted + # run pushover immediately after completing loop for more timely carbsReq notifications without race conditions + PUSHOVER_TOKEN="$(get_pref_string .pushover_token "")" + PUSHOVER_USER="$(get_pref_string .pushover_user "")" + if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then + oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER # 2>&1 >> /var/log/openaps/pushover.log & + fi + # before each of these (optional) refresh checks, make sure we don't have fresh glucose data # if we do, then skip the optional checks to finish up this loop and start the next one if ! 
glucose-fresh; then + wait_for_silence $upto10s if onbattery; then refresh_profile 30 else @@ -317,7 +325,7 @@ function smb_suggest { } function determine_basal { - cat monitor/meal.json + #cat monitor/meal.json update_glucose_noise @@ -480,7 +488,7 @@ function refresh_after_bolus_or_enact { function unsuspend_if_no_temp { # If temp basal duration is zero, unsuspend pump - if (cat monitor/temp_basal.json | jq '. | select(.duration == 0)' | grep -q duration); then + if (cat monitor/temp_basal.json | jq '. | select(.duration == 0)' | grep duration); then if check_pref_bool .unsuspend_if_no_temp false; then echo Temp basal has ended: unsuspending pump mdt resume 2>&3 @@ -600,7 +608,7 @@ function refresh_pumphistory_and_meal { dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name if ! retry_return run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new ; then echo; echo "Couldn't calculate COB" @@ -608,7 +616,7 @@ function refresh_pumphistory_and_meal { fi try_return check_cp_meal || return 1 echo -n "refreshed: " - cat monitor/meal.json + cat monitor/meal.json | jq -cC . } function check_cp_meal { @@ -631,7 +639,7 @@ function check_cp_meal { function calculate_iob { dir_name=~/test_data/oref0-calculate-iob$(date +"%Y-%m-%d-%H%M") #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name run_remote_command 'oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json' > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } @@ -687,7 +695,7 @@ function get_settings { #dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json' 2>&3 | jq . 
> settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } @@ -701,7 +709,7 @@ function get_settings { # generate settings/profile.json.new with autotune dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump-auto #echo dir_name = $dir_name - mkdir -p $dir_name + # mkdir -p $dir_name #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } @@ -736,7 +744,7 @@ function onbattery { function wait_for_bg { if [ "$(get_pref_string .cgm '')" == "mdt" ]; then echo "MDT CGM configured; not waiting" - elif egrep -q "Warning:" enact/smb-suggested.json 2>&3; then + elif egrep -q "Warning:" enact/smb-suggested.json 2>&3 || egrep -q "Could not parse clock data" monitor/meal.json 2>&3; then echo "Retrying without waiting for new BG" elif egrep -q "Waiting [0](\.[0-9])?m ([0-6]?[0-9]s )?to microbolus again." enact/smb-suggested.json 2>&3; then echo "Retrying microbolus without waiting for new BG" diff --git a/bin/oref0-pushover.sh b/bin/oref0-pushover.sh index bc13dd099..d31859870 100755 --- a/bin/oref0-pushover.sh +++ b/bin/oref0-pushover.sh @@ -76,21 +76,38 @@ else PRIORITY_OPTIONS="" fi -date +#date -if file_is_recent monitor/pushover-sent $SNOOZE; then - echo "Last pushover sent less than $SNOOZE minutes ago." -elif ! file_is_recent "$FILE"; then +#function pushover_snooze { +# check Nightscout to see if another rig has already sent a carbsReq pushover recently + URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100 + if [[ "${API_SECRET}" =~ "token=" ]]; then + URL="${URL}&${API_SECRET}" + else + CURL_AUTH='-H api-secret:'${API_SECRET} + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then + #echo $snooze + #echo date -Is -d @$snooze; echo + touch -d $(date -Is -d @$snooze) monitor/pushover-sent + #ls -la monitor/pushover-sent | awk '{print $8,$9}' + fi +#} + +if ! file_is_recent "$FILE"; then echo "$FILE more than 5 minutes old" exit -elif ! cat $FILE | egrep "add'l|maxBolus"; then - echo "No additional carbs or bolus required." -elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l"; then - echo "No additional carbs required." -elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus"; then - echo "No additional insulin required." +elif ! cat $FILE | egrep "add'l|maxBolus" > /dev/null; then + echo -n "No carbsReq. " +elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l" > /dev/null; then + echo -n "No carbsReq. " +elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus" > /dev/null; then + echo -n "No additional insulin required. " +elif file_is_recent monitor/pushover-sent $SNOOZE; then + echo -n "Last pushover sent less than $SNOOZE minutes ago. 
" else - curl -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' | tee /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json + curl -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json | jq .status| grep 1 >/dev/null && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "carbsReq pushover sent." echo fi @@ -106,6 +123,8 @@ source $HOME/.bash_profile key=${MAKER_KEY:-"null"} carbsReq=`jq .carbsReq ${FILE}` tick=`jq .tick ${FILE}` +tick="${tick%\"}" +tick="${tick#\"}" bgNow=`jq .bg ${FILE}` delta=`echo "${tick}" | tr -d +` delta="${delta%\"}" @@ -119,50 +138,68 @@ pushoverGlances=$(get_prefs_json | jq -M '.pushoverGlances') if [ "${pushoverGlances}" == "null" -o "${pushoverGlances}" == "false" ]; then echo "pushoverGlances not enabled in preferences.json" else + # if pushoverGlances is a number instead of just true, use it to set the minutes allowed between glances + re='^[0-9]+$' + if [[ ${pushoverGlances} =~ $re ]]; then + glanceDelay=${pushoverGlances} + else + glanceDelay=10 + fi GLANCES="monitor/last_glance" GLUCOSE="monitor/glucose.json" if [ ! -f $GLANCES ]; then - # First time through it will get created older than 10 minutes so it'll fire - touch $GLANCES && touch -r $GLANCES -d '-11 mins' $GLANCES + # First time through it will get created 1h old so it'll fire + touch $GLANCES && touch -r $GLANCES -d '-60 mins' $GLANCES + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="glance") | select(.date>'$(date +%s -d "$glanceDelay minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then + #echo $snooze + #echo date -Is -d @$snooze; echo + touch -d $(date -Is -d @$snooze) $GLANCES + #ls -la $GLANCES | awk '{print $8,$9}' fi - if test `find $GLANCES -mmin +10` + if test `find $GLANCES -mmin +$glanceDelay` || cat $FILE | egrep "add'l" >/dev/null then - enactTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}') - + curTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}') + lastDirection=`jq -M '.[0] .direction' $GLUCOSE` lastDirection="${lastDirection%\"}" lastDirection="${lastDirection#\"}" + rate=`jq -M '.rate' monitor/temp_basal.json` + duration=`jq -M '.duration' monitor/temp_basal.json` #echo lastDirection=$lastDirection if [ "${lastDirection}" == "SingleUp" ]; then - direction="+" + direction="↑" elif [ "${lastDirection}" == "FortyFiveUp" ]; then - direction="++" + direction="↗" elif [ "${lastDirection}" == "DoubleUp" ]; then - direction="+++" + direction="↑↑" elif [ "${lastDirection}" == "SingleDown" ]; then - direction="-" + direction="↓" elif [ "${lastDirection}" == "FortyFiveDown" ]; then - direction="--" + direction="↘" elif [ "${lastDirection}" == "DoubleDown" ]; then - direction="---" + direction="↓↓" else - direction="" # default for NONE or Flat + direction="→" # default for NONE or Flat fi - if [ test cat $FILE | egrep "add'l" 
]; then - subtext="cr ${carbsReq}g" - else - subtext="e${enactTime}" + title="${bgNow} ${tick} ${direction} @ ${curTime}" + text="IOB ${iob}, COB ${cob}" + if cat $FILE | egrep "add'l" >/dev/null; then + carbsMsg="${carbsReq}g req " fi - text="${bgNow}${direction}" - title="cob ${cob}, iob ${iob}" + subtext="$carbsMsg${rate}U/h ${duration}m" # echo "pushover glance text=${text} subtext=${subtext} delta=${delta} title=${title} battery percent=${battery}" - curl -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json + curl -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json | jq .status| grep 1 >/dev/null && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"glance"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "Glance uploaded and snoozed" touch $GLANCES + else + echo -n "Pushover glance last updated less than $glanceDelay minutes ago @ " + ls -la $GLANCES | awk '{print $8}' fi fi @@ -174,7 +211,7 @@ fi # call with this event that will read out in human language the additional carbs and other # vital facts. It will leave a voice mail if not answered. -if [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then +if ! [ -z "$MAKER_KEY" ] && [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then if file_is_recent monitor/ifttt-sent 60; then echo "carbsReq=${carbsReq} but last IFTTT event sent less than 60 minutes ago." else diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index fcda1352b..a599fc8c6 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -895,7 +895,9 @@ var maxDelta_bg_threshold; zeroTempEffect = round(zeroTempEffect); carbsReq = round(carbsReq); console.error("naive_eventualBG:",naive_eventualBG,"bgUndershoot:",bgUndershoot,"zeroTempDuration:",zeroTempDuration,"zeroTempEffect:",zeroTempEffect,"carbsReq:",carbsReq); - if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) { + if ( meal_data.reason == "Could not parse clock data" ) { + console.error("carbsReq unknown: Could not parse clock data"); + } else if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) { rT.carbsReq = carbsReq; rT.reason += carbsReq + " add'l carbs req w/in " + minutesAboveThreshold + "m; "; }
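For readers following the shared-node changes in this series: the oref0-shared-node-utils helpers accumulate what would otherwise go to stdout/stderr into a final_result object, so the socket server can return both streams and the exit code to the bash client in a single JSON write. A minimal sketch of that calling convention follows; the require path and the sample messages are illustrative, not taken from the patches.

    var utils = require('./bin/oref0-shared-node-utils'); // path assumed relative to the oref0 repo root

    var final_result = utils.initFinalResults();          // { stdout: '', err: '', return_val: 0 }
    utils.console_log(final_result, 'profile generated');               // appended to final_result.stdout
    utils.console_error(final_result, 'DIA of', 2, 'is not supported'); // appended to final_result.err
    utils.process_exit(final_result, 1);                  // records an exit code instead of exiting

    // The socket server then ships everything back to the caller in one payload,
    // e.g. s.write(JSON.stringify(final_result)); as oref0-shared-node.js does above.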
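The reworked isfLookup in lib/profile/isf.js likewise stops caching its last match in module-level state; each caller threads the cached schedule entry through the call instead. A minimal sketch of that calling convention, with a made-up ISF schedule for illustration:

    var isf = require('./lib/profile/isf'); // path assumed relative to the oref0 repo root

    // Illustrative ISF schedule: offset is minutes from midnight, sensitivity in mg/dL per unit.
    var isfProfile = {
        sensitivities: [
            { offset: 0,   sensitivity: 50 },
            { offset: 360, sensitivity: 40 }
        ]
    };

    var lastIsfResult = null;
    var sens;
    // First call scans the schedule and returns the matching sensitivity plus the
    // schedule entry (with endOffset filled in) for the caller to keep.
    [sens, lastIsfResult] = isf.isfLookup(isfProfile, new Date(), lastIsfResult);
    // Later calls in the same loop pass lastIsfResult back in; timestamps that fall
    // inside the same window return early without re-sorting or re-scanning the schedule.
    [sens, lastIsfResult] = isf.isfLookup(isfProfile, new Date(), lastIsfResult);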