Merge branch 'main' of github.com:lnbits/lnbits-legend

This commit is contained in:
callebtc 2022-07-26 12:26:34 +02:00
commit 80dd330ec0
4 changed files with 20 additions and 13 deletions

1
.gitignore vendored
View file

@@ -31,6 +31,7 @@ venv
__bundle__ __bundle__
coverage.xml
node_modules node_modules
lnbits/static/bundle.* lnbits/static/bundle.*
docker docker

View file

@@ -36,7 +36,6 @@ requirements.txt: Pipfile.lock
cat Pipfile.lock | jq -r '.default | map_values(.version) | to_entries | map("\(.key)\(.value)") | join("\n")' > requirements.txt cat Pipfile.lock | jq -r '.default | map_values(.version) | to_entries | map("\(.key)\(.value)") | join("\n")' > requirements.txt
test: test:
rm -rf ./tests/data
mkdir -p ./tests/data mkdir -p ./tests/data
LNBITS_BACKEND_WALLET_CLASS="FakeWallet" \ LNBITS_BACKEND_WALLET_CLASS="FakeWallet" \
FAKE_WALLET_SECRET="ToTheMoon1" \ FAKE_WALLET_SECRET="ToTheMoon1" \
@@ -45,14 +44,12 @@ test:
./venv/bin/pytest --durations=1 -s --cov=lnbits --cov-report=xml tests ./venv/bin/pytest --durations=1 -s --cov=lnbits --cov-report=xml tests
test-real-wallet: test-real-wallet:
rm -rf ./tests/data
mkdir -p ./tests/data mkdir -p ./tests/data
LNBITS_DATA_FOLDER="./tests/data" \ LNBITS_DATA_FOLDER="./tests/data" \
PYTHONUNBUFFERED=1 \ PYTHONUNBUFFERED=1 \
./venv/bin/pytest --durations=1 -s --cov=lnbits --cov-report=xml tests ./venv/bin/pytest --durations=1 -s --cov=lnbits --cov-report=xml tests
test-pipenv: test-pipenv:
rm -rf ./tests/data
mkdir -p ./tests/data mkdir -p ./tests/data
LNBITS_BACKEND_WALLET_CLASS="FakeWallet" \ LNBITS_BACKEND_WALLET_CLASS="FakeWallet" \
FAKE_WALLET_SECRET="ToTheMoon1" \ FAKE_WALLET_SECRET="ToTheMoon1" \

View file

@@ -279,7 +279,7 @@ class CreateLNURLData(BaseModel):
@core_app.post("/api/v1/payments/lnurl") @core_app.post("/api/v1/payments/lnurl")
async def api_payments_pay_lnurl( async def api_payments_pay_lnurl(
data: CreateLNURLData, wallet: WalletTypeInfo = Depends(get_key_type) data: CreateLNURLData, wallet: WalletTypeInfo = Depends(require_admin_key)
): ):
domain = urlparse(data.callback).netloc domain = urlparse(data.callback).netloc
@@ -305,6 +305,12 @@ async def api_payments_pay_lnurl(
detail=f"{domain} said: '{params.get('reason', '')}'", detail=f"{domain} said: '{params.get('reason', '')}'",
) )
if not params.get("pr"):
raise HTTPException(
status_code=HTTPStatus.BAD_REQUEST,
detail=f"{domain} did not return a payment request.",
)
invoice = bolt11.decode(params["pr"]) invoice = bolt11.decode(params["pr"])
if invoice.amount_msat != data.amount: if invoice.amount_msat != data.amount:
raise HTTPException( raise HTTPException(
@@ -312,11 +318,11 @@ async def api_payments_pay_lnurl(
detail=f"{domain} returned an invalid invoice. Expected {data.amount} msat, got {invoice.amount_msat}.", detail=f"{domain} returned an invalid invoice. Expected {data.amount} msat, got {invoice.amount_msat}.",
) )
# if invoice.description_hash != data.description_hash: if invoice.description_hash != data.description_hash:
# raise HTTPException( raise HTTPException(
# status_code=HTTPStatus.BAD_REQUEST, status_code=HTTPStatus.BAD_REQUEST,
# detail=f"{domain} returned an invalid invoice. Expected description_hash == {data.description_hash}, got {invoice.description_hash}.", detail=f"{domain} returned an invalid invoice. Expected description_hash == {data.description_hash}, got {invoice.description_hash}.",
# ) )
extra = {} extra = {}

View file

@@ -102,6 +102,8 @@ def insert_to_pg(query, data):
print(e) print(e)
print(f"Failed to insert {d}") print(f"Failed to insert {d}")
else: else:
print("query:", query)
print("data:", d)
raise ValueError(f"Failed to insert {d}") raise ValueError(f"Failed to insert {d}")
connection.commit() connection.commit()
@@ -259,9 +261,10 @@ def migrate_ext(sqlite_db_file, schema, ignore_missing=True):
open_time, open_time,
used, used,
usescsv, usescsv,
webhook_url webhook_url,
custom_url
) )
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s); VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
""" """
insert_to_pg(q, res.fetchall()) insert_to_pg(q, res.fetchall())
# WITHDRAW HASH CHECK # WITHDRAW HASH CHECK