import { l, t } from "./chunk-USORKAIZ.js";

// node_modules/@arcgis/core/statistics/utils.js

// Defaults: classification method, standard-deviation interval, class-break count, histogram bin count.
var t2 = "equal-interval";
var l2 = 1;
var i = 5;
var a = 10;

// Matches signed numeric tokens inside class-break labels (used to recover std-dev bounds).
var r = /\s*(\+|-)?((\d+(\.\d+)?)|(\.\d+))\s*/gi;

// Numeric esri field types.
var o = new Set(["esriFieldTypeInteger", "esriFieldTypeSmallInteger", "esriFieldTypeSingle", "esriFieldTypeDouble"]);

// Statistic keys that must hold finite numbers (or null).
var u = ["min", "max", "avg", "stddev", "count", "sum", "variance", "nullcount", "median"];

// True when null counting is supported: no normalization, no min/max filter, no SQL expression.
function s(e) {
  const n = e.normalizationField != null || e.normalizationType != null;
  const t3 = e.minValue != null || e.maxValue != null;
  const l3 = !!e.sqlExpression && e.supportsSQLExpression;
  return !n && !t3 && !l3;
}

// Count-only summary: count of non-null values, plus optional nullcount and median.
function m(e) {
  const n = e.returnDistinct ? [...new Set(e.values)] : e.values;
  const t3 = n.filter((e2) => e2 != null).length;
  const l3 = { count: t3 };
  if (e.supportsNullCount) {
    l3.nullcount = n.length - t3;
  }
  if (e.percentileParams) {
    l3.median = d(n, e.percentileParams);
  }
  return l3;
}

// Full summary statistics (min, max, avg, sum, stddev, variance, count) over finite values
// within optional bounds; string values contribute to `count` only.
function c(e) {
  const { values: n, useSampleStdDev: t3, supportsNullCount: l3 } = e;
  let i2 = Number.POSITIVE_INFINITY, a2 = Number.NEGATIVE_INFINITY, r2 = null, o2 = null, u2 = null, s2 = null, m2 = 0;
  const c2 = e.minValue == null ? -1 / 0 : e.minValue;
  const f2 = e.maxValue == null ? 1 / 0 : e.maxValue;
  for (const d2 of n) {
    if (Number.isFinite(d2)) {
      if (d2 >= c2 && d2 <= f2) {
        r2 += d2;
        i2 = Math.min(i2, d2);
        a2 = Math.max(a2, d2);
        m2++;
      }
    } else if (typeof d2 == "string") {
      m2++;
    }
  }
  if (m2 && r2 != null) {
    o2 = r2 / m2;
    let e2 = 0;
    for (const t4 of n) {
      if (Number.isFinite(t4) && t4 >= c2 && t4 <= f2) {
        e2 += (t4 - o2) ** 2;
      }
    }
    s2 = t3 ? (m2 > 1 ? e2 / (m2 - 1) : 0) : (m2 > 0 ? e2 / m2 : 0);
    u2 = Math.sqrt(s2);
  } else {
    i2 = null;
    a2 = null;
  }
  const p2 = { avg: o2, count: m2, max: a2, min: i2, stddev: u2, sum: r2, variance: s2 };
  if (l3) {
    p2.nullcount = n.length - m2;
  }
  if (e.percentileParams) {
    p2.median = d(n, e.percentileParams);
  }
  return p2;
}

// Percentile of the non-null values at `value` (0..1); interpolates between neighbors
// unless the data is discrete or string-typed.
function d(e, n) {
  const { fieldType: t3, value: l3, orderBy: i2, isDiscrete: a2 } = n;
  const r2 = f(t3, i2 === "desc");
  e = [...e].filter((e2) => e2 != null).sort((e2, n2) => r2(e2, n2));
  if (e.length === 0) return null;
  if (l3 <= 0) return e[0];
  if (l3 >= 1) return e[e.length - 1];
  const o2 = (e.length - 1) * l3;
  const u2 = Math.floor(o2);
  const s2 = u2 + 1;
  const m2 = o2 % 1;
  const c2 = e[u2];
  const d2 = e[s2];
  return (s2 >= e.length || a2 || typeof c2 == "string" || typeof d2 == "string") ? c2 : c2 * (1 - m2) + d2 * m2;
}

// Returns a comparator for the given field type and sort order (descending when `n` is true).
function f(e, n) {
  const t3 = n ? 1 : -1;
  const l3 = v(n);
  const i2 = p(n);
  if (!(!!e && ["esriFieldTypeDate", "esriFieldTypeString", "esriFieldTypeGUID", "esriFieldTypeGlobalID", ...o].includes(e))) {
    // Unknown field type: compare numbers numerically, strings lexically, everything else as equal-ish.
    return (e2, n2) => typeof e2 == "number" && typeof n2 == "number" ? l3(e2, n2) : typeof e2 == "string" && typeof n2 == "string" ? i2(e2, n2) : t3;
  }
  if (e === "esriFieldTypeDate") {
    return (e2, n2) => {
      const i3 = new Date(e2).getTime();
      const a2 = new Date(n2).getTime();
      return isNaN(i3) || isNaN(a2) ? t3 : l3(i3, a2);
    };
  }
  if (o.has(e)) return (e2, n2) => l3(e2, n2);
  if (e === "esriFieldTypeString") return (e2, n2) => i2(e2, n2);
  if (e === "esriFieldTypeGUID" || e === "esriFieldTypeGlobalID") {
    const e2 = p(n);
    return (n2, t4) => e2(b(n2), b(t4));
  }
  return n ? (e2, n2) => 1 : (e2, n2) => -1;
}

// Case-insensitive string comparator (descending when `e` is true).
function p(e) {
  return e
    ? (e2, n) => {
        e2 = e2?.toUpperCase();
        n = n?.toUpperCase();
        return e2 > n ? -1 : e2 < n ? 1 : 0;
      }
    : (e2, n) => {
        e2 = e2?.toUpperCase();
        n = n?.toUpperCase();
        return e2 < n ? -1 : e2 > n ? 1 : 0;
      };
}

// Numeric comparator (descending when `e` is true).
function v(e) {
  return e ? (e2, n) => n - e2 : (e2, n) => e2 - n;
}

// Reorders the hex segments of a GUID string (last group first, byte-swapped leading groups)
// before string comparison.
function b(e) {
  return e.substr(24, 12) + e.substr(19, 4) + e.substr(16, 2) + e.substr(14, 2) + e.substr(11, 2) + e.substr(9, 2) + e.substr(6, 2) + e.substr(4, 2) + e.substr(2, 2) + e.substr(0, 2);
}

// Coerces non-finite statistic values to null; when `n` is true, rounds avg/stddev/variance up.
function V(e, n) {
  let t3;
  for (t3 in e) {
    if (u.includes(t3) && !Number.isFinite(e[t3])) {
      e[t3] = null;
    }
  }
  if (n) {
    ["avg", "stddev", "variance"].forEach((n2) => {
      if (e[n2] != null) {
        e[n2] = Math.ceil(e[n2]);
      }
    });
  }
  return e;
}

// Tallies occurrences of each value; null and empty/whitespace-only strings are grouped as null.
function g(e) {
  const n = {};
  for (let t3 of e) {
    if (t3 == null || (typeof t3 == "string" && t3.trim() === "")) {
      t3 = null;
    }
    if (n[t3] == null) {
      n[t3] = { count: 1, data: t3 };
    } else {
      n[t3].count++;
    }
  }
  return { count: n };
}

// Builds uniqueValueInfos from tallied counts; when `t3` is set and a coded-value domain is
// given, codes with no occurrences are included with a count of 0.
function h(e, n, t3) {
  const l3 = e.count;
  const i2 = [];
  if (t3 && n && n.type === "coded-value") {
    n.codedValues.forEach((e2) => {
      const n2 = e2.code;
      if (!l3.hasOwnProperty(n2)) {
        l3[n2] = { data: n2, count: 0 };
      }
    });
  }
  for (const a2 in l3) {
    const e2 = l3[a2];
    i2.push({ value: e2.data, count: e2.count, label: e2.label });
  }
  return { uniqueValueInfos: i2 };
}

// Applies the chosen normalization type to a value; returns null when it cannot be applied.
function T(e, n, t3, l3) {
  let i2 = null;
  switch (n) {
    case "log":
      if (e !== 0) {
        i2 = Math.log(e) * Math.LOG10E;
      }
      break;
    case "percent-of-total":
      if (Number.isFinite(l3) && l3 !== 0) {
        i2 = e / l3 * 100;
      }
      break;
    case "field":
      if (Number.isFinite(t3) && t3 !== 0) {
        i2 = e / t3;
      }
      break;
    case "natural-log":
      if (e > 0) {
        i2 = Math.log(e);
      }
      break;
    case "square-root":
      if (e > 0) {
        i2 = e ** 0.5;
      }
  }
  return i2;
}

// Computes class breaks via the imported classifier `t`, using a definition built by `x`
// and values filtered by `F`.
function y(e, t3) {
  const l3 = x({
    field: t3.field,
    normalizationType: t3.normalizationType,
    normalizationField: t3.normalizationField,
    classificationMethod: t3.classificationMethod,
    standardDeviationInterval: t3.standardDeviationInterval,
    breakCount: t3.numClasses || i
  });
  e = F(e, t3.minValue, t3.maxValue);
  return t({ definition: l3, values: e, normalizationTotal: t3.normalizationTotal });
}

// Keeps only finite values within the optional [min, max] range.
function F(e, n, t3) {
  n = n == null ? -1 / 0 : n;
  t3 = t3 == null ? 1 / 0 : t3;
  return e.filter((e2) => Number.isFinite(e2) && e2 >= n && e2 <= t3);
}

// Builds a classification definition (`l` from the shared chunk) from the given parameters.
function x(n) {
  const i2 = n.field;
  const a2 = n.classificationMethod || t2;
  const r2 = n.normalizationType;
  const o2 = n.normalizationField;
  const u2 = new l();
  u2.classificationField = i2;
  u2.breakCount = n.breakCount;
  u2.classificationMethod = a2;
  u2.standardDeviationInterval = a2 === "standard-deviation" ? n.standardDeviationInterval || l2 : void 0;
  u2.normalizationType = r2;
  u2.normalizationField = r2 === "field" ? o2 : void 0;
  return u2;
}

// Normalizes a class-breaks result; for standard-deviation breaks, parses the min/max
// standard-deviation bounds back out of each break's label.
function D(e, n) {
  let t3 = e.classBreaks;
  const l3 = t3.length;
  const i2 = t3[0].minValue;
  const a2 = t3[l3 - 1].maxValue;
  const o2 = n === "standard-deviation";
  const u2 = r;
  t3 = t3.map((e2) => {
    const n2 = e2.label;
    const t4 = { minValue: e2.minValue, maxValue: e2.maxValue, label: n2 };
    if (o2 && n2) {
      const e3 = n2.match(u2).map((e4) => +e4.trim());
      if (e3.length === 2) {
        t4.minStdDev = e3[0];
        t4.maxStdDev = e3[1];
        if (e3[0] < 0 && e3[1] > 0) {
          t4.hasAvg = true;
        }
      } else if (e3.length === 1) {
        if (n2.includes("<")) {
          t4.minStdDev = null;
          t4.maxStdDev = e3[0];
        } else if (n2.includes(">")) {
          t4.minStdDev = e3[0];
          t4.maxStdDev = null;
        }
      }
    }
    return t4;
  });
  return { minValue: i2, maxValue: a2, classBreakInfos: t3, normalizationTotal: e.normalizationTotal };
}

// Bins values into histogram intervals derived from the given parameters.
function z(e, n) {
  const { min: t3, max: l3, intervals: i2 } = I(e, n);
  const a2 = i2.map((e2, n2) => ({ minValue: i2[n2][0], maxValue: i2[n2][1], count: 0 }));
  for (const r2 of e) {
    if (r2 != null && r2 >= t3 && r2 <= l3) {
      const e2 = N(i2, r2);
      if (e2 > -1) {
        a2[e2].count++;
      }
    }
  }
  return { bins: a2, minValue: t3, maxValue: l3, normalizationTotal: n.normalizationTotal };
}

// Derives histogram min, max, and intervals: the equal-interval case without normalization
// uses a direct split; otherwise intervals come from computed class breaks.
function I(e, n) {
  const { field: t3, classificationMethod: l3, standardDeviationInterval: i2, normalizationType: r2, normalizationField: o2, normalizationTotal: u2, minValue: m2, maxValue: d2 } = n;
  const f2 = n.numBins || a;
  let p2 = null, v2 = null, b2 = null;
  if ((!l3 || l3 === "equal-interval") && !r2) {
    if (m2 != null && d2 != null) {
      p2 = m2;
      v2 = d2;
    } else {
      const n2 = c({
        values: e,
        minValue: m2,
        maxValue: d2,
        useSampleStdDev: !r2,
        supportsNullCount: s({ normalizationType: r2, normalizationField: o2, minValue: m2, maxValue: d2 })
      });
      p2 = n2.min;
      v2 = n2.max;
    }
    b2 = S(p2, v2, f2);
  } else {
    const { classBreaks: n2 } = y(e, {
      field: t3,
      normalizationType: r2,
      normalizationField: o2,
      normalizationTotal: u2,
      classificationMethod: l3,
      standardDeviationInterval: i2,
      minValue: m2,
      maxValue: d2,
      numClasses: f2
    });
    p2 = n2[0].minValue;
    v2 = n2[n2.length - 1].maxValue;
    b2 = n2.map((e2) => [e2.minValue, e2.maxValue]);
  }
  return { min: p2, max: v2, intervals: b2 };
}

// Index of the last interval whose lower bound is <= the value, or -1 if none.
function N(e, n) {
  let t3 = -1;
  for (let l3 = e.length - 1; l3 >= 0; l3--) {
    if (n >= e[l3][0]) {
      t3 = l3;
      break;
    }
  }
  return t3;
}

// Splits [e, n] into t3 equal-width intervals; the last interval ends exactly at n.
function S(e, n, t3) {
  const l3 = (n - e) / t3;
  const i2 = [];
  let a2, r2 = e;
  for (let o2 = 1; o2 <= t3; o2++) {
    a2 = r2 + l3;
    a2 = Number(a2.toFixed(16));
    i2.push([r2, o2 === t3 ? n : a2]);
    r2 = a2;
  }
  return i2;
}

export { s, m, c, d, f, V, g, h, T, y, D, z };
//# sourceMappingURL=chunk-D5GG5NVO.js.map
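// Illustrative usage (a minimal sketch, not part of the generated chunk): how the exported
// helpers could be called from another module in the bundle, assuming plain arrays of field
// values. The sample values and variable names below are hypothetical.
//
//   import { c, g } from "./chunk-D5GG5NVO.js";
//
//   // Summary statistics over finite values; nulls are reported separately via `nullcount`.
//   const stats = c({ values: [3, 7, null, 12, 5], useSampleStdDev: true, supportsNullCount: true });
//   // -> { avg, count, max, min, stddev, sum, variance, nullcount }
//
//   // Unique-value counts; null and empty strings are grouped under the null bucket.
//   const unique = g(["a", "b", "a", null, ""]);
//   // -> { count: { a: { count: 2, data: "a" }, b: { count: 1, data: "b" }, null: { count: 2, data: null } } }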