ClarusC64 commited on
Commit
4067d76
·
verified ·
1 Parent(s): f4696d6

Create scorer.py

Browse files
Files changed (1) hide show
  1. scorer.py +260 -0
scorer.py ADDED
@@ -0,0 +1,260 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from dataclasses import dataclass
5
+ from typing import Dict, List, Optional
6
+
7
+ import pandas as pd
8
+
9
+
10
# Columns the input CSV must contain before any scoring is attempted.
# _validate reports the missing ones; score() refuses to run until the
# schema is complete.
REQUIRED_COLS = [
    "row_id",
    "series_id",
    "timepoint_h",
    "host_model",
    "drug",
    "plasma_conc_mg_L",
    "csf_conc_mg_L",
    "expected_csf_conc_mg_L",
    "csf_deviation",
    "bbb_penetration_ratio",
    "expected_bbb_ratio",
    "ratio_deviation",
    "neuro_marker_index",
    "neuro_coherence_index",
    "exposure_index",
    "stress_index",
    "later_severe_neuro_flag",
    "assay_method",
    "source_type",
    "neuro_threshold_shift_signal",
    "earliest_neuro_threshold_shift",
]
33
+
34
+
35
@dataclass
class Thresholds:
    """Tunable cut-offs used by the threshold-shift detector in score()."""

    # Series with fewer rows than this are tagged "too_few_points" and not scored.
    min_points: int = 3

    # Physiological-state gates: a row is only eligible when both indices
    # are at or above these minima.
    stress_min: float = 0.80
    exposure_min: float = 0.80

    # Series whose first-row bbb_penetration_ratio exceeds this are tagged
    # "baseline_ratio_high" and skipped.
    baseline_ratio_max: float = 0.12

    # Per-row detection gates: low coherence, sufficiently large ratio
    # deviation, and a high enough neuro marker. When
    # require_two_consecutive is True the *next* row must pass the same
    # gates before a hit is accepted.
    coherence_max: float = 0.40
    ratio_dev_min: float = 0.04
    neuro_marker_min: float = 0.50
    require_two_consecutive: bool = True

    # Artifact rejection: a row whose ratio_deviation spikes to at least
    # spike_ratio_dev_min and then collapses to at most
    # snapback_ratio_dev_max on the following row is flagged as a
    # spike/snap-back artifact (see _flag_spike_snap) and ignored.
    spike_ratio_dev_min: float = 0.08
    snapback_ratio_dev_max: float = 0.003
51
+
52
+
53
def _validate(df: pd.DataFrame) -> List[str]:
    """Collect schema and sanity problems with a scoring frame.

    Returns a list of human-readable error strings; an empty list means
    the frame passed every check. Every check is guarded on column
    presence so a partially malformed file reports *all* of its problems
    in one pass instead of raising part-way through.
    """
    errs: List[str] = []

    # Schema: every required column must be present.
    missing = [c for c in REQUIRED_COLS if c not in df.columns]
    if missing:
        errs.append(f"missing_columns: {missing}")

    # Index-style columns must lie inside [0, 1].
    for c in ["exposure_index", "stress_index", "neuro_coherence_index", "neuro_marker_index"]:
        if c in df.columns:
            bad = ((df[c] < 0) | (df[c] > 1)).sum()
            if bad:
                errs.append(f"{c}_out_of_range count={int(bad)}")

    # Concentrations must be strictly positive.
    for c in ["plasma_conc_mg_L", "csf_conc_mg_L", "expected_csf_conc_mg_L"]:
        if c in df.columns:
            bad = (df[c] <= 0).sum()
            if bad:
                errs.append(f"non_positive_values_in: {c} count={int(bad)}")

    # Flag columns must be strictly binary (NaN is counted as bad too,
    # since NaN is not in {0, 1}).
    for c in ["later_severe_neuro_flag", "neuro_threshold_shift_signal", "earliest_neuro_threshold_shift"]:
        if c in df.columns:
            bad = (~df[c].isin([0, 1])).sum()
            if bad:
                errs.append(f"non_binary_values_in: {c} count={int(bad)}")

    # At most one "earliest shift" marker is allowed per series.
    # BUG FIX: this groupby previously ran unconditionally, so a file
    # missing series_id or earliest_neuro_threshold_shift raised KeyError
    # here instead of being reported via the errors list like every other
    # check.
    if "series_id" in df.columns and "earliest_neuro_threshold_shift" in df.columns:
        counts = df.groupby("series_id")["earliest_neuro_threshold_shift"].sum()
        bad_series = counts[counts > 1].index.tolist()
        if bad_series:
            errs.append(f"multiple_earliest_neuro_threshold_shift_in_series: {bad_series}")

    return errs
83
+
84
+
85
+ def _flag_spike_snap(g: pd.DataFrame, t: Thresholds) -> pd.Series:
86
+ flag = pd.Series([0] * len(g), index=g.index)
87
+ if len(g) < 3:
88
+ return flag
89
+
90
+ g = g.sort_values("timepoint_h").copy()
91
+ for i in range(1, len(g) - 1):
92
+ idx = g.index[i]
93
+ next_idx = g.index[i + 1]
94
+ v = float(g.loc[idx, "ratio_deviation"])
95
+ next_v = float(g.loc[next_idx, "ratio_deviation"])
96
+ if v >= t.spike_ratio_dev_min and next_v <= t.snapback_ratio_dev_max:
97
+ flag.loc[idx] = 1
98
+ return flag
99
+
100
+
101
+ def _f1(tp: int, fp: int, fn: int) -> float:
102
+ denom = 2 * tp + fp + fn
103
+ return 0.0 if denom == 0 else (2 * tp) / denom
104
+
105
+
106
def score(path: str) -> Dict[str, object]:
    """Score the threshold-shift CSV at *path* and return a report dict.

    Pipeline:
      1. Read and validate the frame; on any validation error return
         ``{"ok": False, "errors": [...]}`` immediately.
      2. Per series (sorted by timepoint): mark spike/snap-back artifact
         rows, skip series that are too short or whose baseline
         penetration ratio is too high, then scan for the earliest row
         that passes every detection gate (optionally confirmed by the
         next row) and is followed later by a severe-neuro flag.
      3. Aggregate per-series truth/prediction labels into confusion
         counts, F1, and transition-localisation tallies.

    Returns a JSON-serialisable dict (see the final return for its shape).
    """
    df = pd.read_csv(path)
    errors = _validate(df)
    if errors:
        return {"ok": False, "errors": errors}

    t = Thresholds()

    # Canonical ordering; reset_index gives integer row labels so the
    # per-group index labels used below are plain ints.
    df = df.sort_values(["series_id", "timepoint_h"]).reset_index(drop=True)
    df["pred_earliest_neuro_threshold_shift"] = 0
    df["pred_neuro_threshold_shift_signal"] = 0
    df["flag_ratio_spike"] = 0

    series_rows: List[Dict[str, object]] = []

    for sid, g in df.groupby("series_id"):
        g = g.sort_values("timepoint_h").copy()
        # Write artifact flags back into the master frame for this series.
        df.loc[g.index, "flag_ratio_spike"] = _flag_spike_snap(g, t).astype(int)

        # Guard 1: series too short to score — record truth, predict 0.
        if len(g) < t.min_points:
            series_rows.append(
                {
                    "series_id": sid,
                    "y_nts": int(g["neuro_threshold_shift_signal"].max()),
                    "p_nts": 0,
                    "true_transition_row_id": (str(g[g["earliest_neuro_threshold_shift"] == 1].iloc[0]["row_id"]) if (g["earliest_neuro_threshold_shift"] == 1).any() else None),
                    "pred_transition_row_id": None,
                    "flags": ["too_few_points"],
                }
            )
            continue

        # Guard 2: baseline penetration already high — series not scorable.
        baseline_ratio = float(g.iloc[0]["bbb_penetration_ratio"])
        if baseline_ratio > t.baseline_ratio_max:
            series_rows.append(
                {
                    "series_id": sid,
                    "y_nts": int(g["neuro_threshold_shift_signal"].max()),
                    "p_nts": 0,
                    "true_transition_row_id": (str(g[g["earliest_neuro_threshold_shift"] == 1].iloc[0]["row_id"]) if (g["earliest_neuro_threshold_shift"] == 1).any() else None),
                    "pred_transition_row_id": None,
                    "flags": ["baseline_ratio_high"],
                }
            )
            continue

        # Scan for the earliest detection hit. Starts at i=1: the first
        # timepoint is treated as baseline and never a candidate.
        # (`hit` holds a row label — an int because of reset_index above.)
        hit: Optional[int] = None
        for i in range(1, len(g)):
            idx = g.index[i]
            # Skip rows flagged as spike/snap-back artifacts.
            if int(df.loc[idx, "flag_ratio_spike"]) == 1:
                continue

            # Physiological-state gates.
            if float(df.loc[idx, "stress_index"]) < t.stress_min:
                continue
            if float(df.loc[idx, "exposure_index"]) < t.exposure_min:
                continue

            coh = float(df.loc[idx, "neuro_coherence_index"])
            rdev = float(df.loc[idx, "ratio_deviation"])
            nmark = float(df.loc[idx, "neuro_marker_index"])

            # Detection gates: low coherence, large deviation, high marker.
            if coh > t.coherence_max:
                continue
            if rdev < t.ratio_dev_min:
                continue
            if nmark < t.neuro_marker_min:
                continue

            # Optional confirmation: the NEXT row must pass the same
            # gates. Note a candidate on the last row can never confirm.
            if t.require_two_consecutive:
                if i + 1 >= len(g):
                    continue
                idx2 = g.index[i + 1]
                if int(df.loc[idx2, "flag_ratio_spike"]) == 1:
                    continue
                if float(df.loc[idx2, "stress_index"]) < t.stress_min:
                    continue
                if float(df.loc[idx2, "exposure_index"]) < t.exposure_min:
                    continue
                coh2 = float(df.loc[idx2, "neuro_coherence_index"])
                rdev2 = float(df.loc[idx2, "ratio_deviation"])
                nmark2 = float(df.loc[idx2, "neuro_marker_index"])
                if coh2 > t.coherence_max or rdev2 < t.ratio_dev_min or nmark2 < t.neuro_marker_min:
                    continue

            hit = idx
            break

        # A hit only counts if some strictly-later row in the series
        # carries the severe-neuro outcome flag.
        confirm = False
        if hit is not None:
            later = g[g.index > hit]
            confirm = bool((later["later_severe_neuro_flag"] == 1).any())

        if hit is not None and confirm:
            # Earliest-shift marker on the hit row; signal on hit and after.
            df.loc[hit, "pred_earliest_neuro_threshold_shift"] = 1
            df.loc[g[g.index >= hit].index, "pred_neuro_threshold_shift_signal"] = 1

        # Series-level truth/prediction labels (any row signalling => 1).
        y = int(g["neuro_threshold_shift_signal"].max())
        p = int(df.loc[g.index, "pred_neuro_threshold_shift_signal"].max())

        # Localisation: only report a row_id when exactly one row is marked
        # (validation already guarantees at most one true marker per series).
        true_tr = g[g["earliest_neuro_threshold_shift"] == 1]
        true_id: Optional[str] = None
        if len(true_tr) == 1:
            true_id = str(true_tr.iloc[0]["row_id"])

        pred_tr_rows = df.loc[g.index][df.loc[g.index, "pred_earliest_neuro_threshold_shift"] == 1]
        pred_id = str(pred_tr_rows.iloc[0]["row_id"]) if len(pred_tr_rows) == 1 else None

        series_rows.append(
            {
                "series_id": sid,
                "y_nts": y,
                "p_nts": p,
                "true_transition_row_id": true_id,
                "pred_transition_row_id": pred_id,
                "ratio_spike_flags": int(df.loc[g.index, "flag_ratio_spike"].sum()),
            }
        )

    # NOTE(review): a CSV with zero data rows would make `sr` empty and the
    # column selections below raise KeyError — confirm inputs are non-empty.
    sr = pd.DataFrame(series_rows)

    # Series-level confusion counts.
    tp = int(((sr["y_nts"] == 1) & (sr["p_nts"] == 1)).sum())
    fp = int(((sr["y_nts"] == 0) & (sr["p_nts"] == 1)).sum())
    fn = int(((sr["y_nts"] == 1) & (sr["p_nts"] == 0)).sum())
    tn = int(((sr["y_nts"] == 0) & (sr["p_nts"] == 0)).sum())

    # Transition localisation: among series with a true marker, did the
    # predicted row_id match exactly?
    transition_hit = int(
        (sr["true_transition_row_id"].notna() & (sr["true_transition_row_id"] == sr["pred_transition_row_id"])).sum()
    )
    transition_miss = int(
        (sr["true_transition_row_id"].notna() & (sr["true_transition_row_id"] != sr["pred_transition_row_id"])).sum()
    )

    return {
        "ok": True,
        "path": path,
        "counts": {"tp": tp, "fp": fp, "fn": fn, "tn": tn},
        "metrics": {
            "f1_series": _f1(tp, fp, fn),
            "transition_hit": transition_hit,
            "transition_miss": transition_miss,
            "n_series": int(len(sr)),
        },
        "series_table": series_rows,
    }
250
+
251
+
252
if __name__ == "__main__":
    import argparse

    # CLI entry point: score one CSV and emit the report as pretty JSON.
    parser = argparse.ArgumentParser()
    parser.add_argument("--path", required=True)
    cli_args = parser.parse_args()

    print(json.dumps(score(cli_args.path), indent=2))