ClarusC64 commited on
Commit
3af48a1
·
verified ·
1 Parent(s): 56564ea

Create scorer.py

Browse files
Files changed (1) hide show
  1. scorer.py +252 -0
scorer.py ADDED
@@ -0,0 +1,252 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from dataclasses import dataclass
5
+ from typing import Dict, List, Optional
6
+
7
+ import pandas as pd
8
+
9
+
10
# Input schema: every CSV scored by this module must contain all of these
# columns (checked by _validate before any scoring happens).
REQUIRED_COLS = [
    "row_id",
    "series_id",
    "timepoint_h",
    "host_model",
    "abx_drug",
    "abx_dose_mg",
    "abx_conc_ng_mL",
    "expected_abx_conc_ng_mL",
    "abx_conc_deviation",
    "concomitant_drug",
    "concomitant_exposure_index",
    "expected_interaction_shift",
    "interaction_shift_deviation",
    "ddi_coherence_index",
    "host_stress_index",
    "later_adverse_ddi_flag",
    "assay_method",
    "source_type",
    "ddi_emergence_signal",
    "earliest_ddi_emergence",
]
32
+
33
+
34
@dataclass
class Thresholds:
    """Tunable cut-offs used by score() when detecting DDI emergence."""

    # Minimum rows a series needs before detection is attempted at all.
    min_points: int = 3

    # Per-row gates: both indices must be at least this high for a row to
    # count toward an emergence call.
    stress_min: float = 0.80
    concom_min: float = 0.80

    # Emergence criteria: coherence at or below coherence_max AND an absolute
    # interaction shift of at least shift_dev_min; when
    # require_two_consecutive is True the next row must also qualify.
    coherence_max: float = 0.40
    shift_dev_min: float = 40.0
    require_two_consecutive: bool = True

    # Artifact filter (_flag_spike_snap): a shift >= spike_shift_dev_min that
    # snaps back to <= snapback_shift_dev_max at the next timepoint is
    # treated as a transient spike, not a real emergence.
    spike_shift_dev_min: float = 80.0
    snapback_shift_dev_max: float = 3.0
47
+
48
+
49
def _validate(df: pd.DataFrame) -> List[str]:
    """Check structural invariants of a scoring frame.

    Returns a list of human-readable error strings; an empty list means the
    frame passed. The function is meant to report problems rather than raise,
    so every column-specific check is guarded on column presence.
    """
    errs: List[str] = []
    missing = [c for c in REQUIRED_COLS if c not in df.columns]
    if missing:
        errs.append(f"missing_columns: {missing}")

    # Unit-interval indices must stay within [0, 1].
    for c in ["concomitant_exposure_index", "ddi_coherence_index", "host_stress_index"]:
        if c in df.columns:
            bad = ((df[c] < 0) | (df[c] > 1)).sum()
            if bad:
                errs.append(f"{c}_out_of_range count={int(bad)}")

    # Doses and concentrations must be strictly positive.
    for c in ["abx_dose_mg", "abx_conc_ng_mL", "expected_abx_conc_ng_mL"]:
        if c in df.columns:
            bad = (df[c] <= 0).sum()
            if bad:
                errs.append(f"non_positive_values_in: {c} count={int(bad)}")

    # Flag columns must be strictly binary.
    for c in ["later_adverse_ddi_flag", "ddi_emergence_signal", "earliest_ddi_emergence"]:
        if c in df.columns:
            bad = (~df[c].isin([0, 1])).sum()
            if bad:
                errs.append(f"non_binary_values_in: {c} count={int(bad)}")

    # At most one earliest-emergence marker per series.
    # Bug fix: this groupby previously ran unconditionally, so a frame
    # missing "series_id" or "earliest_ddi_emergence" raised KeyError here
    # instead of returning the accumulated error list.
    if "series_id" in df.columns and "earliest_ddi_emergence" in df.columns:
        counts = df.groupby("series_id")["earliest_ddi_emergence"].sum()
        bad_series = counts[counts > 1].index.tolist()
        if bad_series:
            errs.append(f"multiple_earliest_ddi_emergence_in_series: {bad_series}")

    return errs
79
+
80
+
81
+ def _flag_spike_snap(g: pd.DataFrame, t: Thresholds) -> pd.Series:
82
+ flag = pd.Series([0] * len(g), index=g.index)
83
+ if len(g) < 3:
84
+ return flag
85
+
86
+ g = g.sort_values("timepoint_h").copy()
87
+ for i in range(1, len(g) - 1):
88
+ idx = g.index[i]
89
+ next_idx = g.index[i + 1]
90
+ v = float(g.loc[idx, "interaction_shift_deviation"])
91
+ next_v = float(g.loc[next_idx, "interaction_shift_deviation"])
92
+ if v >= t.spike_shift_dev_min and abs(next_v) <= t.snapback_shift_dev_max:
93
+ flag.loc[idx] = 1
94
+ return flag
95
+
96
+
97
+ def _f1(tp: int, fp: int, fn: int) -> float:
98
+ denom = 2 * tp + fp + fn
99
+ return 0.0 if denom == 0 else (2 * tp) / denom
100
+
101
+
102
def score(path: str) -> Dict[str, object]:
    """Score the DDI-emergence predictions for the CSV at *path*.

    Reads the file, validates it with _validate, then runs a rule-based
    detector (default Thresholds) over each series_id group to predict
    per-row emergence and the earliest-emergence row. Returns a dict with
    per-series rows, series-level confusion counts, and an F1 metric; on
    validation failure returns {"ok": False, "errors": [...]} instead.

    NOTE(review): the per-series dicts in "series_table" do not share one
    schema — short/no-concomitant series carry a "flags" list while scored
    series carry "shift_spike_flags"; consumers must handle both shapes.
    """
    df = pd.read_csv(path)
    errors = _validate(df)
    if errors:
        # Structurally invalid input short-circuits without scoring.
        return {"ok": False, "errors": errors}

    t = Thresholds()

    # Canonical ordering; reset_index gives integer row labels used below.
    df = df.sort_values(["series_id", "timepoint_h"]).reset_index(drop=True)
    df["pred_earliest_ddi_emergence"] = 0
    df["pred_ddi_emergence_signal"] = 0
    df["flag_shift_spike"] = 0

    series_rows: List[Dict[str, object]] = []

    for sid, g in df.groupby("series_id"):
        g = g.sort_values("timepoint_h").copy()
        # Spike/snap-back artifacts are computed per series and written back
        # into df so the detection loop below can consult them by row label.
        df.loc[g.index, "flag_shift_spike"] = _flag_spike_snap(g, t).astype(int)

        # Too few points: no prediction attempted for this series.
        if len(g) < t.min_points:
            series_rows.append(
                {
                    "series_id": sid,
                    "y_ddi": int(g["ddi_emergence_signal"].max()),
                    "p_ddi": 0,
                    "true_transition_row_id": (str(g[g["earliest_ddi_emergence"] == 1].iloc[0]["row_id"]) if (g["earliest_ddi_emergence"] == 1).any() else None),
                    "pred_transition_row_id": None,
                    "flags": ["too_few_points"],
                }
            )
            continue

        # A DDI requires a concomitant drug: some exposure recorded AND a
        # drug name other than "none" somewhere in the series.
        has_concom = bool((g["concomitant_exposure_index"] > 0).any()) and bool((g["concomitant_drug"].astype(str).str.lower() != "none").any())
        if not has_concom:
            series_rows.append(
                {
                    "series_id": sid,
                    "y_ddi": int(g["ddi_emergence_signal"].max()),
                    "p_ddi": 0,
                    "true_transition_row_id": (str(g[g["earliest_ddi_emergence"] == 1].iloc[0]["row_id"]) if (g["earliest_ddi_emergence"] == 1).any() else None),
                    "pred_transition_row_id": None,
                    "flags": ["no_concomitant"],
                }
            )
            continue

        # Scan for the first qualifying row (never the first timepoint):
        # not a spike artifact, high stress, high concomitant exposure, low
        # coherence, and a large absolute shift deviation.
        hit: Optional[int] = None
        for i in range(1, len(g)):
            idx = g.index[i]
            if int(df.loc[idx, "flag_shift_spike"]) == 1:
                continue

            if float(df.loc[idx, "host_stress_index"]) < t.stress_min:
                continue
            if float(df.loc[idx, "concomitant_exposure_index"]) < t.concom_min:
                continue

            coh = float(df.loc[idx, "ddi_coherence_index"])
            sdev = float(df.loc[idx, "interaction_shift_deviation"])

            if coh > t.coherence_max:
                continue
            if abs(sdev) < t.shift_dev_min:
                continue

            # Optional confirmation: the immediately following row must pass
            # the same gates, so a last-row candidate cannot qualify.
            if t.require_two_consecutive:
                if i + 1 >= len(g):
                    continue
                idx2 = g.index[i + 1]
                if int(df.loc[idx2, "flag_shift_spike"]) == 1:
                    continue
                if float(df.loc[idx2, "host_stress_index"]) < t.stress_min:
                    continue
                if float(df.loc[idx2, "concomitant_exposure_index"]) < t.concom_min:
                    continue
                coh2 = float(df.loc[idx2, "ddi_coherence_index"])
                sdev2 = float(df.loc[idx2, "interaction_shift_deviation"])
                if coh2 > t.coherence_max or abs(sdev2) < t.shift_dev_min:
                    continue

            hit = idx
            break

        # Outcome confirmation: a candidate only becomes a prediction when a
        # later row in the series carries the adverse-DDI flag.
        confirm = False
        if hit is not None:
            later = g[g.index > hit]
            confirm = bool((later["later_adverse_ddi_flag"] == 1).any())

        if hit is not None and confirm:
            df.loc[hit, "pred_earliest_ddi_emergence"] = 1
            # Emergence is sticky: every row from the hit onward is positive.
            df.loc[g[g.index >= hit].index, "pred_ddi_emergence_signal"] = 1

        # Series-level label vs prediction (any positive row in the series).
        y = int(g["ddi_emergence_signal"].max())
        p = int(df.loc[g.index, "pred_ddi_emergence_signal"].max())

        # Transition row ids are only reported when exactly one row matches.
        true_tr = g[g["earliest_ddi_emergence"] == 1]
        true_id: Optional[str] = None
        if len(true_tr) == 1:
            true_id = str(true_tr.iloc[0]["row_id"])

        pred_tr_rows = df.loc[g.index][df.loc[g.index, "pred_earliest_ddi_emergence"] == 1]
        pred_id = str(pred_tr_rows.iloc[0]["row_id"]) if len(pred_tr_rows) == 1 else None

        series_rows.append(
            {
                "series_id": sid,
                "y_ddi": y,
                "p_ddi": p,
                "true_transition_row_id": true_id,
                "pred_transition_row_id": pred_id,
                "shift_spike_flags": int(df.loc[g.index, "flag_shift_spike"].sum()),
            }
        )

    sr = pd.DataFrame(series_rows)

    # Series-level confusion counts over y_ddi vs p_ddi.
    tp = int(((sr["y_ddi"] == 1) & (sr["p_ddi"] == 1)).sum())
    fp = int(((sr["y_ddi"] == 0) & (sr["p_ddi"] == 1)).sum())
    fn = int(((sr["y_ddi"] == 1) & (sr["p_ddi"] == 0)).sum())
    tn = int(((sr["y_ddi"] == 0) & (sr["p_ddi"] == 0)).sum())

    # Exact-row transition accuracy, counted only where a true id exists.
    transition_hit = int(
        (sr["true_transition_row_id"].notna() & (sr["true_transition_row_id"] == sr["pred_transition_row_id"])).sum()
    )
    transition_miss = int(
        (sr["true_transition_row_id"].notna() & (sr["true_transition_row_id"] != sr["pred_transition_row_id"])).sum()
    )

    return {
        "ok": True,
        "path": path,
        "counts": {"tp": tp, "fp": fp, "fn": fn, "tn": tn},
        "metrics": {
            "f1_series": _f1(tp, fp, fn),
            "transition_hit": transition_hit,
            "transition_miss": transition_miss,
            "n_series": int(len(sr)),
        },
        "series_table": series_rows,
    }
242
+
243
+
244
if __name__ == "__main__":
    import argparse

    ap = argparse.ArgumentParser(description="Score a DDI-emergence CSV and print the result as JSON.")
    ap.add_argument("--path", required=True, help="Path to the input CSV file.")
    args = ap.parse_args()

    result = score(args.path)
    # Bug fix: default=str guards against non-JSON-native scalars — e.g.
    # numpy integers produced as pandas groupby keys and stored under
    # "series_id" in series_table — which made json.dumps raise TypeError.
    print(json.dumps(result, indent=2, default=str))