# Tests for the token persistence helpers in megacode/tokens.py.
import json
from tokens import (
ProviderTokens,
load_state,
save_state,
save_tokens,
promote_next_tokens,
clear_next_tokens,
)
import tokens as t
def test_save_and_load_state(tmp_path, monkeypatch):
    """Round-trip: save_state followed by load_state yields both token sets."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "tokens.json")
    current = ProviderTokens("key1", None, 0)
    pending = ProviderTokens("key2", None, 0)
    save_state(current, pending)
    loaded_active, loaded_next = load_state()
    assert loaded_active is not None
    assert loaded_active.access_token == "key1"
    assert loaded_next is not None
    assert loaded_next.access_token == "key2"
def test_promote_next_tokens(tmp_path, monkeypatch):
    """Promotion moves the next tokens into the active slot and empties next."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "tokens.json")
    save_state(ProviderTokens("key1", None, 0), ProviderTokens("key2", None, 0))
    promoted = promote_next_tokens()
    assert promoted is True
    active, pending = load_state()
    assert active is not None
    assert active.access_token == "key2"
    assert pending is None
def test_clear_next_tokens(tmp_path, monkeypatch):
    """Clearing drops the next tokens while leaving the active ones intact."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "tokens.json")
    save_state(ProviderTokens("key1", None, 0), ProviderTokens("key2", None, 0))
    clear_next_tokens()
    active, pending = load_state()
    assert active is not None
    assert active.access_token == "key1"
    assert pending is None
def test_save_tokens_preserves_next(tmp_path, monkeypatch):
    """save_tokens overwrites only the active slot; the next slot survives."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "tokens.json")
    save_state(ProviderTokens("key1", None, 0), ProviderTokens("key2", None, 0))
    # Replace only the active tokens; "key2" should still be queued as next.
    save_tokens(ProviderTokens("key3", None, 0))
    active, pending = load_state()
    assert active is not None
    assert active.access_token == "key3"
    assert pending is not None
    assert pending.access_token == "key2"
def test_load_missing_file(tmp_path, monkeypatch):
    """A nonexistent tokens file loads as (None, None) instead of raising."""
    monkeypatch.setattr(t, "TOKENS_FILE", tmp_path / "missing.json")
    active, pending = load_state()
    assert active is None
    assert pending is None
def test_atomic_write(tmp_path, monkeypatch):
    """The persisted file is valid JSON with the active tokens under "active"."""
    tokens_file = tmp_path / "tokens.json"
    monkeypatch.setattr(t, "TOKENS_FILE", tokens_file)
    save_state(ProviderTokens("x", None, 0), None)
    # Parse the file directly to confirm a complete, well-formed write landed.
    data = json.loads(tokens_file.read_text())
    assert data["active"]["access_token"] == "x"