| |
| |
| |
|
|
"""Minimal inference example for screenpipe-pii-redactor.

Usage:
    pip install git+https://github.com/openai/privacy-filter.git
    git lfs pull  # if you haven't already, to get the model weights
    python examples/inference.py
"""
|
|
| from __future__ import annotations |
|
|
| from pathlib import Path |
|
|
| |
| from opf import OPF |
|
|
|
|
# Repo-relative location of the bundled model weights: <repo root>/model.
CHECKPOINT = Path(__file__).resolve().parents[1] / "model"
|
|
|
|
# Representative inputs for the redactor: window titles, terminal output,
# and free-form prose containing various categories of PII / secrets.
SAMPLES = [
    # Window titles with embedded names, emails, and API keys.
    "Welcome | Marcus Chen — Confluence",
    "Outlook — Inbox — marcus.chen@helios-ai.io",
    "WezTerm — export OPENAI_API_KEY=sk-proj-AbCdEf123456GhIjKlMnOp",
    "Slack — #engineering — alice-codes",
    "Arc — github.com/acme/widget-engine — pull request 412",
    # A clean title — nothing here should be flagged.
    "Cursor — Welcome",
    # Free-form prose mixing name, email, phone, address, and a URL token.
    (
        "Hi team, please reach out to Marcus Chen at marcus.chen@helios-ai.io "
        "or call +1 (415) 555-0142. His office is at 123 Market Street, "
        "San Francisco, CA. Onboarding doc is at "
        "https://acme.com/welcome?token=tok_abc123xyz_long."
    ),
    # Credentials surfaced in terminal commands / output.
    "psql postgres://aiden:S3cretP@ss@db.acme.com:5432/prod",
    "cat ~/.ssh/id_ed25519 — -----BEGIN OPENSSH PRIVATE KEY-----",
]
|
|
|
|
def main() -> None:
    """Load the bundled checkpoint and print redaction spans for each sample."""
    # Fail fast with a helpful hint when the LFS weights were never pulled.
    if not CHECKPOINT.exists():
        raise SystemExit(
            f"checkpoint not found at {CHECKPOINT}\n"
            "did you run 'git lfs pull' to download the weights?"
        )

    print(f"loading model from {CHECKPOINT}…", flush=True)

    # torch is imported lazily so the checkpoint check above stays instant.
    import torch

    device = "cuda" if torch.cuda.is_available() else "cpu"
    redactor = OPF(model=str(CHECKPOINT), device=device)
    print(f" device: {device}\n", flush=True)

    for sample in SAMPLES:
        result = redactor.redact(sample)
        print(f" input: {sample!r}")
        if result.detected_spans:
            for span in result.detected_spans:
                print(f" [{span.start:>4}:{span.end:>4}] {span.label:<18s} {span.text!r}")
            print()
        else:
            print(" spans: (none)\n")
|
|
|
|
# Script entry point: `python examples/inference.py`.
if __name__ == "__main__":
    main()
|
|