import json
from pathlib import Path

from providers.base import ProviderTokens
from providers.chatgpt import tokens as t

def test_normalize_state_backward_compatible():
    """A legacy flat token dict is wrapped into the new active/next layout."""
    legacy = {"access_token": "a", "refresh_token": "r", "expires_at": 1}

    state = t._normalize_state(legacy)

    # The flat fields become the active account; no second account is queued.
    assert state["active"]["access_token"] == "a"
    assert state["next_account"] is None
|
|
|
|
|
|
def test_promote_next_tokens(tmp_path, monkeypatch):
    """Promoting makes the queued account active and empties the queue."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "chatgpt_tokens.json")

    # Persist an active account plus a queued second account.
    t.save_state(ProviderTokens("a1", "r1", 100), ProviderTokens("a2", "r2", 200))

    assert t.promote_next_tokens() is True

    current, queued = t.load_state()
    assert current is not None
    assert current.access_token == "a2"  # queued account took over
    assert queued is None  # queue slot was cleared
|
|
|
|
|
|
def test_save_tokens_preserves_next(tmp_path, monkeypatch):
    """save_tokens replaces only the active account; the queued one survives."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "chatgpt_tokens.json")
    t.save_state(ProviderTokens("a1", "r1", 100), ProviderTokens("a2", "r2", 200))

    # Overwrite just the active slot.
    t.save_tokens(ProviderTokens("a3", "r3", 300))

    current, queued = t.load_state()
    assert current is not None and current.access_token == "a3"
    assert queued is not None and queued.access_token == "a2"
|
|
|
|
|
|
def test_atomic_write_produces_valid_json(tmp_path, monkeypatch):
    """save_state leaves a parseable JSON document at TOKENS_FILE."""
    target = tmp_path / "chatgpt_tokens.json"
    monkeypatch.setattr(t, "TOKENS_FILE", target)

    t.save_state(ProviderTokens("x", "y", 123), None)

    # The on-disk bytes must round-trip through the JSON parser.
    data = json.loads(target.read_text())
    assert "active" in data
    assert data["active"]["access_token"] == "x"
|
|
|
|
|
|
def test_load_state_from_missing_file(tmp_path, monkeypatch):
    """A nonexistent tokens file loads as an empty (None, None) state."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "missing.json")

    active, queued = t.load_state()

    assert active is None
    assert queued is None
|