1
0
Fork 0

small fix

This commit is contained in:
Arthur K. 2026-04-25 13:18:04 +03:00
parent 5349befcf4
commit 89b85b321e
Signed by: wzray
GPG key ID: B97F30FDC4636357
8 changed files with 251 additions and 211 deletions

2
.gitignore vendored
View file

@ -3,3 +3,5 @@
.ruff_cache/
__pycache__/
/data/
.mypy_cache/
.opencode/

View file

@ -1,188 +0,0 @@
# Plan: rewrite token selection around a simple disk-first state model
## Goal
Throw away the current layered selection/cooldown/state model and replace it with a small implementation that:
- reads the main JSON file on every `/token` request
- keeps only the minimum necessary account fields on disk
- decides from file state first
- refreshes usage only when missing or stale
- validates the selected token before returning it
- moves invalid accounts to `failed.json`
- does not touch the helper scripts in this pass
## Required file model
### Main state file
`accounts.json`
```json
{
"active_account": "user@example.com",
"accounts": [
{
"email": "user@example.com",
"access_token": "...",
"refresh_token": "...",
"token_refresh_at": 1710000000,
"usage": {
"primary": {
"used_percent": 72,
"reset_at": 1710018000
},
"secondary": {
"used_percent": 18,
"reset_at": 1710600000
}
},
"usage_checked_at": 1710000000,
"disabled": false
}
]
}
```
Only these fields should exist for account state.
### Failed state file
`failed.json`
```json
{
"accounts": [
{
"email": "bad@example.com",
"access_token": "...",
"refresh_token": "...",
"token_refresh_at": 1710000000,
"usage": {
"primary": {
"used_percent": 100,
"reset_at": 1710018000
},
"secondary": {
"used_percent": 100,
"reset_at": 1710600000
}
},
"usage_checked_at": 1710000000,
"disabled": false
}
]
}
```
Top-level must contain only `accounts`.
## Selection rules
### Active account first
For each `/token` request:
1. Read `accounts.json` fresh from disk.
2. Resolve `active_account` by email.
3. Evaluate active first.
### When an account is usable
An account is usable when:
- `disabled == false`
- `secondary.used_percent < 100`
- `primary.used_percent < GIBBY_EXHAUSTED_USAGE_THRESHOLD`
Default threshold remains `95`.
### Usage freshness
Usage must be refreshed only when missing or stale.
Add env:
- `GIBBY_USAGE_STALE_SECONDS`, default `3600`
Usage is stale when:
- `usage` is missing
- `usage_checked_at` is missing
- `now - usage_checked_at > GIBBY_USAGE_STALE_SECONDS`
If active account usage is stale or missing, refresh usage for that account before deciding if it is usable.
### Fallback selection
If active account cannot be used, choose the next account by:
- filtering to usable accounts
- sorting by highest primary `used_percent`
- using file order as the tie-breaker
If a new account is chosen, write its email into `active_account` in `accounts.json`.
## Token flow
For the chosen account:
1. Ensure token is fresh enough.
2. If `token_refresh_at` says refresh is needed, refresh token and persist new values.
3. After selection decisions are finished and the token is ready, validate it by calling:
`https://chatgpt.com/backend-api/codex/models`
4. Only return the token if validation returns `200`.
## Invalid account handling
If refresh, usage auth, or final validation shows the token/account is invalid:
1. Read current main state.
2. Remove that full account object from `accounts.json`.
3. Append the same full account object to `failed.json.accounts`.
4. If it was the active account, clear `active_account` before reselection.
5. Persist both files atomically.
No `failed.txt` in the rewritten core flow.
## Files to rewrite
- `/home/wzray/AI/gibby/src/gibby/settings.py`
- keep only env needed for the new flow
- `/home/wzray/AI/gibby/src/gibby/store.py`
- rewrite as simple JSON read/write helpers for `accounts.json` and `failed.json`
- `/home/wzray/AI/gibby/src/gibby/client.py`
- keep only token refresh, usage fetch, and token validation calls
- `/home/wzray/AI/gibby/src/gibby/manager.py`
- rewrite into one small service for `/token`
- `/home/wzray/AI/gibby/src/gibby/app.py`
- keep thin FastAPI wiring for `/health` and `/token`
## Files to remove or stop using
- `/home/wzray/AI/gibby/src/gibby/models.py`
- `/home/wzray/AI/gibby/src/gibby/account_ops.py`
Their logic should be folded into the new minimal data model and service flow rather than being preserved.
## Out of scope for this pass
- do not touch `scripts/oauth_helper.py`
- do not touch `scripts/refresh_limits.py`
- do not preserve old cooldown, failed.txt, dual-state, or derived snapshot machinery unless absolutely required to keep the app booting during the rewrite
## Verification
- `uv run pytest -q`
- API tests for:
- `/health` returns `ok`
- `/token` returns `503` when file has no usable accounts
- `/token` prefers active account when usable
- `/token` rereads the file between requests
- stale usage triggers a refresh before decision
- fresh usage skips refresh
- invalid token moves full account object to `failed.json`
- fallback chooses highest primary usage among usable non-disabled accounts
- direct file tests for exact `accounts.json` and `failed.json` schema

View file

@ -3,6 +3,7 @@ from __future__ import annotations
import argparse
import asyncio
import sys
import time
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parents[1] / "src"))
@ -28,7 +29,19 @@ async def run(data_dir: Path | None = None) -> None:
try:
for account in list(state.accounts):
try:
if account.token_refresh_at <= int(time.time()) + settings.token_refresh_buffer_seconds:
access_token, refresh_token, refresh_at = await client.refresh_access_token(
account.refresh_token
)
account.access_token = access_token
account.refresh_token = refresh_token
account.token_refresh_at = refresh_at
payload = await client.fetch_usage_payload(account.access_token)
email = payload.get("email")
if isinstance(email, str) and email:
previous_email = account.email
account.email = email
store.update_active_account(state, previous_email, email)
account.usage = parse_usage_payload(payload)
account.usage_checked_at = account.usage.checked_at
print(
@ -40,6 +53,14 @@ async def run(data_dir: Path | None = None) -> None:
)
except OpenAIAPIError as exc:
if exc.permanent:
usage = account.usage
print(
f"moving account to failed.json: email={account.email} reason=usage refresh auth failure: {exc} "
f"primary={usage.primary_window.used_percent if usage and usage.primary_window else 0}% "
f"primary reset in {format_reset_in(usage.primary_window.reset_at if usage and usage.primary_window else None)} "
f"secondary={usage.secondary_window.used_percent if usage and usage.secondary_window else 0}% "
f"secondary reset in {format_reset_in(usage.secondary_window.reset_at if usage and usage.secondary_window else None)}"
)
store.move_to_failed(state, account.email)
print(f"{account.email}: removed={exc}")
else:

View file

@ -112,13 +112,21 @@ class OpenAIClient:
headers=self._headers(access_token),
params={"client_version": "2.3.9"},
)
if response.status_code != 200:
logger.warning(
"token validation failed: status=%s body=%s",
response.status_code,
_extract_error_text(response),
)
return response.status_code == 200
if response.status_code == 200:
return True
body = _extract_error_text(response)
logger.warning(
"token validation failed: status=%s body=%s",
response.status_code,
body,
)
if response.status_code in {401, 403}:
return False
raise OpenAIAPIError(
f"token validation transient failure: status={response.status_code} body={body}",
permanent=False,
status_code=response.status_code,
)
@staticmethod
def _headers(access_token: str) -> dict[str, str]:

View file

@ -6,7 +6,15 @@ from dataclasses import asdict
from typing import Any
from gibby.client import OpenAIAPIError, OpenAIClient
from gibby.models import AccountRecord, StateFile, UsageSnapshot, build_limit, now_ts, parse_usage_payload
from gibby.models import (
AccountRecord,
StateFile,
UsageSnapshot,
build_limit,
format_reset_in,
now_ts,
parse_usage_payload,
)
from gibby.settings import Settings
from gibby.store import JsonStateStore
@ -45,9 +53,19 @@ class AccountManager:
"primary": item.usage.primary_window.used_percent
if item.usage and item.usage.primary_window
else None,
"primary_reset_in": format_reset_in(
item.usage.primary_window.reset_at
if item.usage and item.usage.primary_window
else None
),
"secondary": item.usage.secondary_window.used_percent
if item.usage and item.usage.secondary_window
else None,
"secondary_reset_in": format_reset_in(
item.usage.secondary_window.reset_at
if item.usage and item.usage.secondary_window
else None
),
}
for item in state.accounts
],
@ -63,16 +81,46 @@ class AccountManager:
continue
self.store.save(state)
if not await self.client.validate_token(account.access_token):
logger.warning("account %s failed token validation", account.email)
self.store.move_to_failed(state, account.email)
self.store.save(state)
continue
try:
if not await self.client.validate_token(account.access_token):
logger.warning(
"token validation auth failed for %s, refreshing token and retrying",
account.email,
)
if not await self._ensure_fresh_token(state, account, force=True):
continue
self.store.save(state)
if not await self.client.validate_token(account.access_token):
self._move_to_failed(
state,
account,
"token validation returned auth failure after forced refresh",
)
self.store.save(state)
continue
except OpenAIAPIError as exc:
logger.warning(
"token validation skipped for %s: transient validation error: %s",
account.email,
exc,
)
raise NoUsableAccountError("Token validation temporarily failed") from exc
state.active_account = account.email
self.store.save(state)
usage = account.usage or UsageSnapshot(checked_at=now_ts())
logger.info("token issued for %s", account.email)
logger.info(
"token issued for %s, primary %s%% reset in %s, secondary %s%% reset in %s",
account.email,
usage.primary_window.used_percent if usage.primary_window else 0,
format_reset_in(
usage.primary_window.reset_at if usage.primary_window else None
),
usage.secondary_window.used_percent if usage.secondary_window else 0,
format_reset_in(
usage.secondary_window.reset_at if usage.secondary_window else None
),
)
return {
"token": account.access_token,
"limit": build_limit(usage),
@ -123,8 +171,7 @@ class AccountManager:
return None
except OpenAIAPIError as exc:
if exc.permanent:
logger.warning("account %s failed usage refresh", account.email)
self.store.move_to_failed(state, account.email)
self._move_to_failed(state, account, f"usage refresh auth failure: {exc}")
self.store.save(state)
return None
if self._is_usable(account):
@ -134,7 +181,19 @@ class AccountManager:
async def _refresh_usage(self, state: StateFile, account: AccountRecord) -> bool:
if not await self._ensure_fresh_token(state, account):
return False
payload = await self.client.fetch_usage_payload(account.access_token)
try:
payload = await self.client.fetch_usage_payload(account.access_token)
except OpenAIAPIError as exc:
if not exc.permanent:
raise
logger.warning(
"usage fetch auth failed for %s, refreshing token and retrying",
account.email,
)
if not await self._ensure_fresh_token(state, account, force=True):
return False
self.store.save(state)
payload = await self.client.fetch_usage_payload(account.access_token)
email = payload.get("email")
if isinstance(email, str) and email:
account.email = email
@ -143,8 +202,14 @@ class AccountManager:
self.store.save(state)
return True
async def _ensure_fresh_token(self, state: StateFile, account: AccountRecord) -> bool:
if account.token_refresh_at > now_ts() + self.settings.token_refresh_buffer_seconds:
async def _ensure_fresh_token(
self, state: StateFile, account: AccountRecord, *, force: bool = False
) -> bool:
if (
not force
and account.token_refresh_at
> now_ts() + self.settings.token_refresh_buffer_seconds
):
return True
try:
access_token, refresh_token, refresh_at = await self.client.refresh_access_token(
@ -152,8 +217,7 @@ class AccountManager:
)
except OpenAIAPIError as exc:
if exc.permanent:
logger.warning("account %s failed token refresh", account.email)
self.store.move_to_failed(state, account.email)
self._move_to_failed(state, account, f"token refresh auth failure: {exc}")
self.store.save(state)
return False
raise
@ -186,3 +250,20 @@ class AccountManager:
if account.usage is None or account.usage.primary_window is None:
return 0
return account.usage.primary_window.used_percent
def _move_to_failed(
self, state: StateFile, account: AccountRecord, reason: str
) -> None:
usage = account.usage
logger.error(
"moving account to failed.json: email=%s reason=%s primary=%s%% primary_reset_in=%s secondary=%s%% secondary_reset_in=%s disabled=%s usage_checked_at=%s",
account.email,
reason,
usage.primary_window.used_percent if usage and usage.primary_window else 0,
format_reset_in(usage.primary_window.reset_at if usage and usage.primary_window else None),
usage.secondary_window.used_percent if usage and usage.secondary_window else 0,
format_reset_in(usage.secondary_window.reset_at if usage and usage.secondary_window else None),
account.disabled,
account.usage_checked_at,
)
self.store.move_to_failed(state, account.email)

View file

@ -1,6 +1,7 @@
from __future__ import annotations
import json
import logging
import os
from pathlib import Path
from tempfile import NamedTemporaryFile
@ -8,6 +9,8 @@ from typing import Any
from gibby.models import AccountRecord, StateFile, UsageSnapshot, UsageWindow
logger = logging.getLogger(__name__)
class JsonStateStore:
def __init__(self, path: Path, failed_path: Path | None = None):
@ -58,7 +61,13 @@ class JsonStateStore:
def move_to_failed(self, state: StateFile, email: str) -> None:
account = next((account for account in state.accounts if account.email == email), None)
if account is None:
logger.error("move_to_failed called for missing account: email=%s", email)
return
logger.error(
"moving account to failed.json: email=%s accounts_before=%s",
email,
len(state.accounts),
)
self.remove_account(state, email)
self.append_failed_account(account)
@ -108,6 +117,8 @@ class JsonStateStore:
return {
"primary": JsonStateStore._window_to_dict(snapshot.primary_window),
"secondary": JsonStateStore._window_to_dict(snapshot.secondary_window),
"limit_reached": snapshot.limit_reached,
"allowed": snapshot.allowed,
}
@staticmethod
@ -141,6 +152,8 @@ class JsonStateStore:
checked_at=int(checked_at or 0),
primary_window=JsonStateStore._window_from_dict(payload.get("primary")),
secondary_window=JsonStateStore._window_from_dict(payload.get("secondary")),
limit_reached=bool(payload.get("limit_reached", False)),
allowed=bool(payload.get("allowed", True)),
)
@staticmethod

View file

@ -19,11 +19,17 @@ class FakeClient(OpenAIClient):
usage_by_token=None,
refresh_map=None,
invalid_tokens=None,
transient_validation_tokens=None,
auth_failing_usage_tokens=None,
auth_failing_validation_tokens=None,
permanent_refresh_tokens=None,
):
self.usage_by_token = usage_by_token or {}
self.refresh_map = refresh_map or {}
self.invalid_tokens = set(invalid_tokens or [])
self.transient_validation_tokens = set(transient_validation_tokens or [])
self.auth_failing_usage_tokens = set(auth_failing_usage_tokens or [])
self.auth_failing_validation_tokens = set(auth_failing_validation_tokens or [])
self.permanent_refresh_tokens = set(permanent_refresh_tokens or [])
self.fetched_usage_tokens: list[str] = []
self.validated_tokens: list[str] = []
@ -38,6 +44,8 @@ class FakeClient(OpenAIClient):
async def fetch_usage_payload(self, access_token: str):
self.fetched_usage_tokens.append(access_token)
if access_token in self.auth_failing_usage_tokens:
raise OpenAIAPIError("usage auth failed", permanent=True, status_code=401)
usage = self.usage_by_token[access_token]
return {
"email": f"{access_token}@example.com",
@ -61,6 +69,10 @@ class FakeClient(OpenAIClient):
async def validate_token(self, access_token: str) -> bool:
self.validated_tokens.append(access_token)
if access_token in self.auth_failing_validation_tokens:
return False
if access_token in self.transient_validation_tokens:
raise OpenAIAPIError("validation 502", permanent=False, status_code=502)
return access_token not in self.invalid_tokens
@ -224,20 +236,105 @@ async def test_refreshes_token_before_validation(tmp_path: Path) -> None:
@pytest.mark.asyncio
async def test_invalid_token_moves_account_to_failed_json(tmp_path: Path) -> None:
bad = make_account("bad@example.com", token="tok-bad", usage=make_usage(20, 0))
bad = make_account(
"bad@example.com",
token="tok-bad",
refresh_token="ref-bad",
usage=make_usage(20, 0),
)
good = make_account("good@example.com", token="tok-good", usage=make_usage(30, 0))
store = make_store(tmp_path, StateFile(active_account="bad@example.com", accounts=[bad, good]))
client = FakeClient(invalid_tokens={"tok-bad"})
client = FakeClient(
refresh_map={"ref-bad": ("tok-bad-2", "ref-bad-2", int(time.time()) + 600)},
auth_failing_validation_tokens={"tok-bad", "tok-bad-2"},
)
payload = await make_manager(store, client).issue_token_response()
state = store.load()
failed = json.loads((tmp_path / "failed.json").read_text())
assert payload["token"] == "tok-good"
assert client.validated_tokens == ["tok-bad", "tok-bad-2", "tok-good"]
assert [account.email for account in state.accounts] == ["good@example.com"]
assert failed["accounts"][0]["email"] == "bad@example.com"
@pytest.mark.asyncio
async def test_transient_validation_error_does_not_move_account_to_failed(
tmp_path: Path,
) -> None:
account = make_account("a@example.com", token="tok-a", usage=make_usage(20, 0))
store = make_store(
tmp_path, StateFile(active_account="a@example.com", accounts=[account])
)
client = FakeClient(transient_validation_tokens={"tok-a"})
with pytest.raises(NoUsableAccountError):
await make_manager(store, client).issue_token_response()
state = store.load()
assert [account.email for account in state.accounts] == ["a@example.com"]
assert not (tmp_path / "failed.json").exists()
@pytest.mark.asyncio
async def test_usage_auth_failure_refreshes_token_before_failed_json(
tmp_path: Path,
) -> None:
stale = int(time.time()) - 7200
account = make_account(
"a@example.com",
token="old-token",
refresh_token="ref-a",
token_refresh_at=int(time.time()) + 600,
usage=make_usage(20, 0, checked_at=stale),
)
store = make_store(
tmp_path, StateFile(active_account="a@example.com", accounts=[account])
)
client = FakeClient(
usage_by_token={"new-token": make_usage(21, 0)},
refresh_map={"ref-a": ("new-token", "new-refresh", int(time.time()) + 600)},
auth_failing_usage_tokens={"old-token"},
)
payload = await make_manager(store, client).issue_token_response()
state = store.load()
assert payload["token"] == "new-token"
assert client.fetched_usage_tokens == ["old-token", "new-token"]
assert [account.email for account in state.accounts] == ["new-token@example.com"]
assert not (tmp_path / "failed.json").exists()
@pytest.mark.asyncio
async def test_validation_auth_failure_refreshes_token_before_failed_json(
tmp_path: Path,
) -> None:
account = make_account(
"a@example.com",
token="old-token",
refresh_token="ref-a",
token_refresh_at=int(time.time()) + 600,
usage=make_usage(20, 0),
)
store = make_store(
tmp_path, StateFile(active_account="a@example.com", accounts=[account])
)
client = FakeClient(
refresh_map={"ref-a": ("new-token", "new-refresh", int(time.time()) + 600)},
auth_failing_validation_tokens={"old-token"},
)
payload = await make_manager(store, client).issue_token_response()
state = store.load()
assert payload["token"] == "new-token"
assert client.validated_tokens == ["old-token", "new-token"]
assert [account.email for account in state.accounts] == ["a@example.com"]
assert not (tmp_path / "failed.json").exists()
@pytest.mark.asyncio
async def test_rereads_disk_between_requests(tmp_path: Path) -> None:
first = make_account("a@example.com", token="tok-a", usage=make_usage(20, 0))

View file

@ -42,6 +42,8 @@ def test_store_writes_minimal_accounts_schema(tmp_path) -> None:
"usage": {
"primary": {"used_percent": 70, "reset_at": 1300},
"secondary": {"used_percent": 20, "reset_at": 4600},
"limit_reached": False,
"allowed": True,
},
"usage_checked_at": 1000,
"disabled": False,
@ -65,6 +67,8 @@ def test_store_load_reconstructs_account_state(tmp_path) -> None:
"usage": {
"primary": {"used_percent": 80, "reset_at": 1300},
"secondary": {"used_percent": 15, "reset_at": 4600},
"limit_reached": True,
"allowed": False,
},
"usage_checked_at": 1000,
"disabled": True,
@ -82,6 +86,8 @@ def test_store_load_reconstructs_account_state(tmp_path) -> None:
assert state.accounts[0].usage is not None
assert state.accounts[0].usage.primary_window is not None
assert state.accounts[0].usage.primary_window.used_percent == 80
assert state.accounts[0].usage.limit_reached is True
assert state.accounts[0].usage.allowed is False
assert state.accounts[0].disabled is True