Merge remote-tracking branch 'origin/main' into codex/whe-31-adapters

# Conflicts:
#	tsconfig.json
This commit is contained in:
2026-03-08 22:17:26 +04:00
25 changed files with 763 additions and 23 deletions

1
.bun-version Normal file
View File

@@ -0,0 +1 @@
1.3.10

View File

@@ -85,7 +85,7 @@ jobs:
if: ${{ needs.check-secrets.outputs.db_secret_ok == 'true' }}
uses: oven-sh/setup-bun@v2
with:
bun-version: 1.3.10
bun-version-file: .bun-version
- name: Install dependencies for migrations
if: ${{ needs.check-secrets.outputs.db_secret_ok == 'true' }}

View File

@@ -38,7 +38,7 @@ jobs:
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: 1.3.10
bun-version-file: .bun-version
- name: Restore Bun cache
uses: actions/cache@v4

View File

@@ -13,6 +13,7 @@ COPY packages/db/package.json packages/db/package.json
COPY packages/domain/package.json packages/domain/package.json
COPY packages/observability/package.json packages/observability/package.json
COPY packages/ports/package.json packages/ports/package.json
COPY scripts/package.json scripts/package.json
RUN bun install --frozen-lockfile

View File

@@ -50,4 +50,13 @@ describe('extractPurchaseTopicCandidate', () => {
expect(record).toBeNull()
})
test('skips slash commands in purchase topic', () => {
const record = extractPurchaseTopicCandidate(
candidate({ rawText: '/statement 2026-03' }),
config
)
expect(record).toBeNull()
})
})

View File

@@ -36,6 +36,10 @@ export function extractPurchaseTopicCandidate(
value: PurchaseTopicCandidate,
config: PurchaseTopicIngestionConfig
): PurchaseTopicRecord | null {
if (value.rawText.trim().startsWith('/')) {
return null
}
if (value.chatId !== config.householdChatId) {
return null
}
@@ -195,14 +199,16 @@ export function registerPurchaseTopicIngestion(
llmFallback?: PurchaseParserLlmFallback
} = {}
): void {
bot.on('message:text', async (ctx) => {
bot.on('message:text', async (ctx, next) => {
const candidate = toCandidateFromContext(ctx)
if (!candidate) {
await next()
return
}
const record = extractPurchaseTopicCandidate(candidate, config)
if (!record) {
await next()
return
}

View File

@@ -13,6 +13,7 @@ COPY packages/db/package.json packages/db/package.json
COPY packages/domain/package.json packages/domain/package.json
COPY packages/observability/package.json packages/observability/package.json
COPY packages/ports/package.json packages/ports/package.json
COPY scripts/package.json scripts/package.json
RUN bun install --frozen-lockfile

View File

@@ -8,6 +8,7 @@
"@types/bun": "1.3.10",
"@typescript/native-preview": "7.0.0-dev.20260304.1",
"drizzle-kit": "^0.31.4",
"lefthook": "2.1.2",
"oxlint": "^1.51.0",
"typescript": "^5.9.2",
},
@@ -82,6 +83,14 @@
"@household/domain": "workspace:*",
},
},
"scripts": {
"name": "@household/scripts",
"devDependencies": {
"@household/config": "workspace:*",
"@household/db": "workspace:*",
"drizzle-orm": "^0.44.5",
},
},
},
"packages": {
"@babel/code-frame": ["@babel/code-frame@7.29.0", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw=="],
@@ -202,6 +211,8 @@
"@household/ports": ["@household/ports@workspace:packages/ports"],
"@household/scripts": ["@household/scripts@workspace:scripts"],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
@@ -456,6 +467,28 @@
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
"lefthook": ["lefthook@2.1.2", "", { "optionalDependencies": { "lefthook-darwin-arm64": "2.1.2", "lefthook-darwin-x64": "2.1.2", "lefthook-freebsd-arm64": "2.1.2", "lefthook-freebsd-x64": "2.1.2", "lefthook-linux-arm64": "2.1.2", "lefthook-linux-x64": "2.1.2", "lefthook-openbsd-arm64": "2.1.2", "lefthook-openbsd-x64": "2.1.2", "lefthook-windows-arm64": "2.1.2", "lefthook-windows-x64": "2.1.2" }, "bin": { "lefthook": "bin/index.js" } }, "sha512-HdAMl4g47kbWSkrUkCx3Kucq54omFS6piMJtXwXNtmCAfB40UaybTJuYtFW4hNzZ5SvaEimtxTp7P/MNIkEfsA=="],
"lefthook-darwin-arm64": ["lefthook-darwin-arm64@2.1.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-AgHu93YuJtj1l9bcKlCbo4Tg8N8xFl9iD6BjXCGaGMu46LSjFiXbJFlkUdpgrL8fIbwoCjJi5FNp3POpqs4Wdw=="],
"lefthook-darwin-x64": ["lefthook-darwin-x64@2.1.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-exooc9Ectz13OLJJOXM9AzaFQbqzf9QCF8JuVvGfbr4RYABYK+BwwtydjlPQrA76/n/h4tsS11MH5bBULnLkYA=="],
"lefthook-freebsd-arm64": ["lefthook-freebsd-arm64@2.1.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-E1QMlJPEU21n9eewv6ePfh+JmoTSg5R1jaYcKCky10kfbMdohNucI3xV91F2LcerE+p3UejKDqr/1wWO2RMGeQ=="],
"lefthook-freebsd-x64": ["lefthook-freebsd-x64@2.1.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-/5zp+x8055Thj46x9S7hgnneZxvWhHQvPWkkgISCab1Lh6eLrbxvhE1qTb1lU3DqTnNmH9NeXdq1xPHc9uGluA=="],
"lefthook-linux-arm64": ["lefthook-linux-arm64@2.1.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK5FvDTkwKO7tOznY8iEZzuTsM1jXMZAG5BMRs7olN1k1K6m2unR6oKABP0hCd0wDErK6DZKDJDJfB564Rzqtw=="],
"lefthook-linux-x64": ["lefthook-linux-x64@2.1.2", "", { "os": "linux", "cpu": "x64" }, "sha512-4eOtz4PNh8GbJ+nA8YVDfW/eMirQWdZqMP/V/MVtoVBGobf6oXvvuDOySvAPOgNYEFN0Boegytmuji/851Vstg=="],
"lefthook-openbsd-arm64": ["lefthook-openbsd-arm64@2.1.2", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-lJXRJ6iJIBKwomuNBA3CUNSclj2/rKuxGAQoUra214B92VB6jL9zaY5YEs6h/ie9jQrzSnllEeg7xyDIsuVCrQ=="],
"lefthook-openbsd-x64": ["lefthook-openbsd-x64@2.1.2", "", { "os": "openbsd", "cpu": "x64" }, "sha512-GyOje4W0DIqkmR7/Of5D+mZ0vWqMvtGAVedtJR6d1239xNeMzCS8Q+/a3O1xigceZa5xhlqq0BWlssB/QYPQnA=="],
"lefthook-windows-arm64": ["lefthook-windows-arm64@2.1.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-MZKMqTULEpX/8N3fKXAR0A9RjsGKkEEY0japLqrHOIpxsJXry1DRz0FvQo2kkY4WW3rtFegV9m6eesOymuDrUg=="],
"lefthook-windows-x64": ["lefthook-windows-x64@2.1.2", "", { "os": "win32", "cpu": "x64" }, "sha512-NZUgObuaSxc0EXAwC/CzkMf7TuQc++GGIk6TLPdaUpoSsNSJSZEwBVz5DtFB1cG+eMkfO/wOKplls+yjimTTtQ=="],
"lightningcss": ["lightningcss@1.31.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.31.1", "lightningcss-darwin-arm64": "1.31.1", "lightningcss-darwin-x64": "1.31.1", "lightningcss-freebsd-x64": "1.31.1", "lightningcss-linux-arm-gnueabihf": "1.31.1", "lightningcss-linux-arm64-gnu": "1.31.1", "lightningcss-linux-arm64-musl": "1.31.1", "lightningcss-linux-x64-gnu": "1.31.1", "lightningcss-linux-x64-musl": "1.31.1", "lightningcss-win32-arm64-msvc": "1.31.1", "lightningcss-win32-x64-msvc": "1.31.1" } }, "sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ=="],
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.31.1", "", { "os": "android", "cpu": "arm64" }, "sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg=="],

View File

@@ -0,0 +1,66 @@
# End-to-End Smoke Tests
## Overview
The `scripts/e2e/billing-flow.ts` script runs a deterministic end-to-end
smoke test for the billing pipeline. It exercises:
- Purchase ingestion from a simulated topic message
- Utility bill entry via bot commands
- Monthly statement generation and balance verification
## Prerequisites
- Bun 1.3+ installed
- A running Supabase/Postgres database with the schema applied
- `DATABASE_URL` set (via `.env` or environment)
- `E2E_SMOKE_ALLOW_WRITE=true` set explicitly (safety guard)
## Running locally
```bash
# 1. Ensure .env has a valid DATABASE_URL
cp .env.example .env
# edit .env with real DATABASE_URL
# 2. Apply database migrations
bun run db:migrate
# 3. Run the e2e smoke test
E2E_SMOKE_ALLOW_WRITE=true bun run test:e2e
```
The test seeds its own data (household + 3 roommates), runs the full
purchase → utility → statement flow, asserts deterministic totals, and
cleans up after itself.
## Expected output
On success:
```text
E2E smoke passed: purchase ingestion, utility updates, and statements are deterministic
```
On failure the script exits with code 1 and prints the assertion error.
## CI integration
Run the e2e smoke test with `bun run test:e2e` locally or in a dedicated
CI job. If you wire it into CI, gate it on `DATABASE_URL` and
`E2E_SMOKE_ALLOW_WRITE` to avoid false failures. The test is **not**
part of the standard CI quality matrix by default.
## Test data
The test creates temporary records with random UUIDs:
| Entity | Details |
| --------- | -------------------------- |
| Household | "E2E Smoke Household" |
| Alice | Admin, telegram ID 900001 |
| Bob | Member, telegram ID 900002 |
| Carol | Member, telegram ID 900003 |
All test data is cleaned up in a `finally` block via cascade delete on
the household row.

View File

@@ -18,7 +18,7 @@ gcloud auth application-default login
```bash
cp infra/terraform/terraform.tfvars.example infra/terraform/terraform.tfvars
terraform -chdir=infra/terraform init
terraform -chdir=infra/terraform init -backend-config="bucket=<terraform-state-bucket>"
terraform -chdir=infra/terraform plan
terraform -chdir=infra/terraform apply
```
@@ -35,10 +35,21 @@ bun run infra:validate
After first apply, add secret versions:
```bash
echo -n "<telegram-bot-token>" | gcloud secrets versions add telegram-bot-token --data-file=- --project <project_id>
echo -n "<telegram-webhook-secret>" | gcloud secrets versions add telegram-webhook-secret --data-file=- --project <project_id>
echo -n "<scheduler-shared-secret>" | gcloud secrets versions add scheduler-shared-secret --data-file=- --project <project_id>
```
If you set optional secret IDs such as `database_url_secret_id` or
`openai_api_key_secret_id`, add versions for those secrets too.
Keep bot runtime config that is not secret in your `*.tfvars` file:
- `bot_household_id`
- `bot_household_chat_id`
- `bot_purchase_topic_id`
- optional `bot_parser_model`
## Environment strategy
- Keep separate states for `dev` and `prod`.

View File

@@ -0,0 +1,77 @@
# HOUSEBOT-061: Local End-to-End Smoke Tests for Billing Flow
## Summary
Add a pragmatic local smoke test that exercises the main billing path against a real database with deterministic assertions.
## Goals
- Provide `bun run test:e2e` for local pre-deploy confidence.
- Cover purchase ingestion, manual utility entry, and statement generation in one flow.
- Ensure smoke data is isolated and cleaned up automatically.
## Non-goals
- Full browser or Telegram API end-to-end automation.
- Running destructive write tests in the default CI quality matrix.
- Comprehensive scenario coverage for every finance edge case.
## Scope
- In: write-gated smoke script, docs, typed env for the smoke test, deterministic assertions, cleanup.
- Out: full staging environment orchestration.
## Interfaces and Contracts
- Command: `bun run test:e2e`
- Required env:
- `DATABASE_URL`
- `E2E_SMOKE_ALLOW_WRITE=true`
- Script behavior:
- creates temporary household/member/cycle data
- simulates Telegram topic purchase ingestion
- simulates finance commands for rent, utilities, and statements
- deletes created data in `finally`
## Domain Rules
- Use integer minor units only.
- Statement totals must match deterministic settlement behavior.
- Purchase-topic ingestion must not swallow non-purchase slash commands.
## Data Model Changes
- None.
## Security and Privacy
- Test writes are disabled unless `E2E_SMOKE_ALLOW_WRITE=true`.
- No production secrets are logged by the smoke script.
## Observability
- Script prints a single success line on pass.
- Failures surface assertion or runtime errors with non-zero exit code.
## Edge Cases and Failure Modes
- Missing `DATABASE_URL`: fail fast in env validation.
- Missing explicit write guard: fail fast before DB writes.
- Middleware ordering regression: smoke test should fail when commands stop emitting statements.
## Test Plan
- Unit: parser/topic candidate tests cover slash-command exclusion.
- Integration: `bun run test:e2e` against a migrated dev database.
- E2E: same smoke script verifies purchase ingestion -> statement -> recalculated statement after utility update.
## Acceptance Criteria
- [ ] `bun run test:e2e` executes locally with deterministic output.
- [ ] Purchase ingestion and utility updates are both covered in the same smoke flow.
- [ ] Docs explain required env and safety guard.
## Rollout Plan
- Keep the smoke test local-first.
- Consider adding an opt-in CI job later once a dedicated disposable database is available.

View File

@@ -29,7 +29,7 @@ This directory contains baseline IaC for deploying the household bot platform on
1. Initialize:
```bash
terraform -chdir=infra/terraform init
terraform -chdir=infra/terraform init -backend-config="bucket=<terraform-state-bucket>"
```
2. Prepare variables:
@@ -53,10 +53,14 @@ terraform -chdir=infra/terraform apply
5. Add secret values (after apply):
```bash
echo -n "<telegram-bot-token>" | gcloud secrets versions add telegram-bot-token --data-file=- --project <project_id>
echo -n "<value>" | gcloud secrets versions add telegram-webhook-secret --data-file=- --project <project_id>
echo -n "<value>" | gcloud secrets versions add scheduler-shared-secret --data-file=- --project <project_id>
```
If you configure optional secret IDs such as `database_url_secret_id` or
`openai_api_key_secret_id`, add versions for those secrets as well.
## Environments
Recommended approach:
@@ -64,6 +68,11 @@ Recommended approach:
- Keep one state per environment (dev/prod) using separate backend configs or workspaces
- Use `terraform.tfvars` per environment (`dev.tfvars`, `prod.tfvars`)
- Keep `project_id` separate for dev/prod when possible
- Keep non-secret bot config in `*.tfvars`:
- `bot_household_id`
- `bot_household_chat_id`
- `bot_purchase_topic_id`
- optional `bot_parser_model`
## CI validation

View File

@@ -16,7 +16,10 @@ locals {
var.telegram_webhook_secret_id,
var.scheduler_shared_secret_id,
var.supabase_url_secret_id,
var.supabase_publishable_key_secret_id
var.supabase_publishable_key_secret_id,
var.database_url_secret_id,
var.telegram_bot_token_secret_id,
var.openai_api_key_secret_id
]))
api_services = toset([

View File

@@ -77,9 +77,23 @@ module "bot_api_service" {
max_instance_count = var.bot_max_instances
labels = local.common_labels
env = {
NODE_ENV = var.environment
}
env = merge(
{
NODE_ENV = var.environment
},
var.bot_household_id == null ? {} : {
HOUSEHOLD_ID = var.bot_household_id
},
var.bot_household_chat_id == null ? {} : {
TELEGRAM_HOUSEHOLD_CHAT_ID = var.bot_household_chat_id
},
var.bot_purchase_topic_id == null ? {} : {
TELEGRAM_PURCHASE_TOPIC_ID = tostring(var.bot_purchase_topic_id)
},
var.bot_parser_model == null ? {} : {
PARSER_MODEL = var.bot_parser_model
}
)
secret_env = merge(
{
@@ -91,6 +105,15 @@ module "bot_api_service" {
},
var.supabase_publishable_key_secret_id == null ? {} : {
SUPABASE_PUBLISHABLE_KEY = var.supabase_publishable_key_secret_id
},
var.database_url_secret_id == null ? {} : {
DATABASE_URL = var.database_url_secret_id
},
var.telegram_bot_token_secret_id == null ? {} : {
TELEGRAM_BOT_TOKEN = var.telegram_bot_token_secret_id
},
var.openai_api_key_secret_id == null ? {} : {
OPENAI_API_KEY = var.openai_api_key_secret_id
}
)

View File

@@ -5,8 +5,13 @@ service_prefix = "household"
artifact_repository_id = "household-bot"
bot_api_image = "europe-west1-docker.pkg.dev/my-gcp-project/household-bot/bot-api:latest"
mini_app_image = "europe-west1-docker.pkg.dev/my-gcp-project/household-bot/mini-app:latest"
bot_api_image = "europe-west1-docker.pkg.dev/my-gcp-project/household-bot/bot:latest"
mini_app_image = "europe-west1-docker.pkg.dev/my-gcp-project/household-bot/miniapp:latest"
bot_household_id = "11111111-1111-4111-8111-111111111111"
bot_household_chat_id = "-1001234567890"
bot_purchase_topic_id = 777
bot_parser_model = "gpt-4.1-mini"
scheduler_cron = "0 9 * * *"
scheduler_timezone = "Asia/Tbilisi"

View File

@@ -70,6 +70,55 @@ variable "supabase_publishable_key_secret_id" {
nullable = true
}
variable "database_url_secret_id" {
description = "Optional Secret Manager ID for DATABASE_URL"
type = string
default = null
nullable = true
}
variable "telegram_bot_token_secret_id" {
description = "Secret Manager ID for TELEGRAM_BOT_TOKEN"
type = string
default = "telegram-bot-token"
}
variable "bot_household_id" {
description = "Optional HOUSEHOLD_ID value for bot runtime"
type = string
default = null
nullable = true
}
variable "bot_household_chat_id" {
description = "Optional TELEGRAM_HOUSEHOLD_CHAT_ID value for bot runtime"
type = string
default = null
nullable = true
}
variable "bot_purchase_topic_id" {
description = "Optional TELEGRAM_PURCHASE_TOPIC_ID value for bot runtime"
type = number
default = null
nullable = true
}
variable "bot_parser_model" {
description = "Optional PARSER_MODEL override for bot runtime"
type = string
default = null
nullable = true
}
variable "openai_api_key_secret_id" {
description = "Optional Secret Manager ID for OPENAI_API_KEY"
type = string
default = null
nullable = true
}
variable "scheduler_path" {
description = "Reminder endpoint path on bot API"
type = string

View File

@@ -1,6 +1,11 @@
terraform {
required_version = ">= 1.8.0"
backend "gcs" {
# The bucket will need to be configured via `terraform init -backend-config="bucket=<YOUR_BUCKET>"`
# or you can hardcode the bucket name here. Since it's a generic module, we leave it to be configured via init args.
}
required_providers {
google = {
source = "hashicorp/google"

21
lefthook.yml Normal file
View File

@@ -0,0 +1,21 @@
# lefthook.yml
# Git hook configuration managed by lefthook; hooks are installed by the
# root package.json "prepare" script (`lefthook install`).

# Fast checks on every commit: formatting and lint only, so commits stay cheap.
pre-commit:
  parallel: true
  commands:
    format:
      glob: '*.{ts,tsx,js,jsx,json,md}'
      run: bun run format:check
    lint:
      glob: '*.{ts,tsx,js,jsx}'
      run: bun run lint

# Heavier checks before push: typecheck, tests, and a full build.
pre-push:
  parallel: true
  commands:
    typecheck:
      run: bun run typecheck
    test:
      run: bun run test
    build:
      run: bun run build

View File

@@ -4,12 +4,14 @@
"type": "module",
"workspaces": [
"apps/*",
"packages/*"
"packages/*",
"scripts"
],
"scripts": {
"build": "bun run --filter '*' build",
"typecheck": "bun run --filter '*' typecheck",
"test": "bun run --filter '*' test",
"prepare": "[ -d .git ] && lefthook install || true",
"lint": "oxlint .",
"lint:fix": "oxlint --fix .",
"format": "bunx oxfmt .",
@@ -29,12 +31,14 @@
"docker:build:bot": "docker build -f apps/bot/Dockerfile -t household-bot:local .",
"docker:build:miniapp": "docker build -f apps/miniapp/Dockerfile -t household-miniapp:local .",
"docker:build": "bun run docker:build:bot && bun run docker:build:miniapp",
"docker:smoke": "docker compose up --build"
"docker:smoke": "docker compose up --build",
"test:e2e": "bun run scripts/e2e/billing-flow.ts"
},
"devDependencies": {
"@types/bun": "1.3.10",
"@typescript/native-preview": "7.0.0-dev.20260304.1",
"drizzle-kit": "^0.31.4",
"lefthook": "2.1.2",
"oxlint": "^1.51.0",
"typescript": "^5.9.2"
}

View File

@@ -0,0 +1,21 @@
import { createEnv } from '@t3-oss/env-core'
import { z } from 'zod'

// Environment schema for the local e2e smoke test (scripts/e2e/billing-flow.ts).
// Kept separate from the main app env so the smoke test's write guard never
// leaks into production configuration.
const server = {
  // Postgres connection string the smoke test reads and writes against.
  DATABASE_URL: z.string().url(),
  // Explicit opt-in safety guard: the string 'true' is transformed to boolean
  // true; anything absent defaults to 'false' so writes are off by default.
  E2E_SMOKE_ALLOW_WRITE: z
    .enum(['true', 'false'])
    .default('false')
    .transform((v) => v === 'true')
}

// Validated, typed view of process.env for the e2e script.
export const e2eEnv = createEnv({
  server,
  runtimeEnv: process.env,
  // Treat VAR="" the same as an unset VAR so defaults still apply.
  emptyStringAsUndefined: true,
  onValidationError: (issues) => {
    // Print the full issue list before failing fast — the smoke test should
    // never proceed to DB writes with a half-valid environment.
    console.error('Invalid e2e environment variables:')
    console.error(JSON.stringify(issues, null, 2))
    throw new Error('E2E environment validation failed')
  }
})

View File

@@ -1 +1,2 @@
export { env } from './env'
export { e2eEnv } from './env-e2e'

349
scripts/e2e/billing-flow.ts Normal file
View File

@@ -0,0 +1,349 @@
import assert from 'node:assert/strict'
import { randomUUID } from 'node:crypto'
import { eq } from 'drizzle-orm'
import { createDbClient, schema } from '@household/db'
import { createTelegramBot } from '../../apps/bot/src/bot'
import { createFinanceCommandsService } from '../../apps/bot/src/finance-commands'
import {
createPurchaseMessageRepository,
registerPurchaseTopicIngestion
} from '../../apps/bot/src/purchase-topic-ingestion'
// Deterministic fixture chat/topic identifiers used by every simulated update.
// The chat id appears twice in the original Telegram payloads: as a string in
// the ingestion config and as a number in message objects — derive the numeric
// form from the string so the two can never drift apart.
const chatId = '-100123456'
const purchaseTopicId = 77
const commandChatIdNumber = Number(chatId)
// Unix timestamp (seconds) for noon UTC on the given calendar date.
// Noon is used so timezone offsets cannot shift the date in either direction.
function unixSeconds(year: number, month: number, day: number): number {
  const noonUtcMillis = Date.UTC(year, month - 1, day, 12, 0, 0)
  return Math.floor(noonUtcMillis / 1000)
}
// Builds a fake Telegram update for a slash command sent in the household
// supergroup, with a bot_command entity covering the leading "/token" so
// grammY's command middleware recognizes it.
function commandUpdate(params: {
  updateId: number
  fromUserId: string
  fromName: string
  text: string
  unixTime: number
}) {
  // Entity length is just the first whitespace-delimited token ("/cmd").
  const [firstToken] = params.text.split(' ')
  const commandToken = firstToken ?? params.text
  const message = {
    message_id: params.updateId,
    date: params.unixTime,
    chat: {
      id: commandChatIdNumber,
      type: 'supergroup'
    },
    from: {
      id: Number(params.fromUserId),
      is_bot: false,
      first_name: params.fromName
    },
    text: params.text,
    entities: [
      {
        offset: 0,
        length: commandToken.length,
        type: 'bot_command'
      }
    ]
  }
  return { update_id: params.updateId, message }
}
// Builds a fake Telegram update for a free-text purchase message posted into
// the dedicated purchase forum topic (is_topic_message + message_thread_id).
function topicPurchaseUpdate(params: {
  updateId: number
  fromUserId: string
  fromName: string
  text: string
  unixTime: number
}) {
  const { updateId, fromUserId, fromName, text, unixTime } = params
  return {
    update_id: updateId,
    message: {
      message_id: updateId,
      date: unixTime,
      chat: {
        id: commandChatIdNumber,
        type: 'supergroup'
      },
      from: {
        id: Number(fromUserId),
        is_bot: false,
        first_name: fromName
      },
      is_topic_message: true,
      message_thread_id: purchaseTopicId,
      text
    }
  }
}
// Parses a bot statement reply ("Statement for YYYY-MM" header followed by
// "- Name: 123.45 USD" lines) into a name -> signed-amount-string map.
// Lines that do not match the expected shape are skipped silently.
function parseStatement(text: string): Map<string, string> {
  // Hoisted out of the loop: one compilation instead of one per line.
  const statementLine = /^-\s(.+?):\s([+-]?\d+\.\d{2})\s(?:USD|GEL)$/
  const amounts = new Map<string, string>()
  // slice(1) drops the "Statement for ..." header line.
  for (const line of text.split('\n').slice(1)) {
    const match = statementLine.exec(line.trim())
    // Guarded narrowing instead of non-null assertions on the capture groups.
    if (match !== null && match[1] !== undefined && match[2] !== undefined) {
      amounts.set(match[1], match[2])
    }
  }
  return amounts
}
// Loads and validates the e2e environment, refusing to proceed unless the
// explicit write guard is set. The import is dynamic so env validation only
// runs when the smoke test actually starts.
async function loadE2eConfig(): Promise<{ databaseUrl: string }> {
  const configModule = await import('@household/config')
  const { E2E_SMOKE_ALLOW_WRITE: allowWrite, DATABASE_URL: databaseUrl } = configModule.e2eEnv
  if (!allowWrite) {
    throw new Error('Set E2E_SMOKE_ALLOW_WRITE=true to run e2e smoke test')
  }
  return { databaseUrl }
}
// End-to-end smoke: seeds a throwaway household, replays a deterministic
// sequence of Telegram updates through the real bot middleware, and asserts
// statement totals plus ingestion rows. All writes are cleaned up in `finally`
// via cascade delete on the household row. Statement order matters here:
// handlers must be registered before updates are replayed, and seed data must
// exist before the first command update.
async function run(): Promise<void> {
  // Fails fast unless E2E_SMOKE_ALLOW_WRITE=true (safety guard).
  const { databaseUrl } = await loadE2eConfig()
  // Fresh UUIDs per run so smoke rows never collide with real data.
  const ids = {
    household: randomUUID(),
    admin: randomUUID(),
    bob: randomUUID(),
    carol: randomUUID()
  }
  // Synthetic Telegram user ids for the three seeded roommates.
  const telegram = {
    admin: '900001',
    bob: '900002',
    carol: '900003'
  }
  // Declared before `try` so the `finally` cleanup can see whatever was opened.
  let coreClient: ReturnType<typeof createDbClient> | undefined
  let ingestionClient: ReturnType<typeof createPurchaseMessageRepository> | undefined
  let financeService: ReturnType<typeof createFinanceCommandsService> | undefined
  // Dummy token — every outbound API call is intercepted by the transformer
  // below, so nothing ever reaches Telegram.
  const bot = createTelegramBot('000000:test-token')
  const replies: string[] = []
  // Pre-populate bot identity so handleUpdate() does not call getMe.
  bot.botInfo = {
    id: 999000,
    is_bot: true,
    first_name: 'Household Test Bot',
    username: 'household_test_bot',
    can_join_groups: true,
    can_read_all_group_messages: false,
    supports_inline_queries: false,
    can_connect_to_business: false,
    has_main_web_app: false,
    has_topics_enabled: true,
    allows_users_to_create_topics: false
  }
  // API transformer: short-circuits every outbound call. sendMessage text is
  // captured into `replies` for the assertions below; everything else gets a
  // generic ok response.
  bot.api.config.use(async (_prev, method, payload) => {
    if (method === 'sendMessage') {
      const p = payload as any
      const messageText = typeof p?.text === 'string' ? p.text : ''
      replies.push(messageText)
      return {
        ok: true,
        result: {
          message_id: replies.length,
          date: Math.floor(Date.now() / 1000),
          chat: {
            id: commandChatIdNumber,
            type: 'supergroup'
          },
          text: messageText
        }
      } as any
    }
    return { ok: true, result: true } as any
  })
  try {
    // Three separate clients mirror production wiring: core DB access,
    // purchase-message ingestion, and finance command handling.
    coreClient = createDbClient(databaseUrl, {
      max: 2,
      prepare: false
    })
    ingestionClient = createPurchaseMessageRepository(databaseUrl)
    financeService = createFinanceCommandsService(databaseUrl, {
      householdId: ids.household
    })
    // Register middleware BEFORE replaying updates.
    registerPurchaseTopicIngestion(
      bot,
      {
        householdId: ids.household,
        householdChatId: chatId,
        purchaseTopicId
      },
      ingestionClient.repository
    )
    financeService.register(bot)
    // Seed the household and its three members (Alice is the only admin).
    await coreClient.db.insert(schema.households).values({
      id: ids.household,
      name: 'E2E Smoke Household'
    })
    await coreClient.db.insert(schema.members).values([
      {
        id: ids.admin,
        householdId: ids.household,
        telegramUserId: telegram.admin,
        displayName: 'Alice',
        isAdmin: 1
      },
      {
        id: ids.bob,
        householdId: ids.household,
        telegramUserId: telegram.bob,
        displayName: 'Bob',
        isAdmin: 0
      },
      {
        id: ids.carol,
        householdId: ids.household,
        telegramUserId: telegram.carol,
        displayName: 'Carol',
        isAdmin: 0
      }
    ])
    // Monotonically increasing fake update ids; all updates stamped noon UTC
    // on 2026-03-12 so cycle assignment is deterministic.
    let updateId = 1000
    const march12 = unixSeconds(2026, 3, 12)
    // Flow: open cycle -> set rent -> topic purchase -> add utility -> statement.
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/cycle_open 2026-03 USD',
        unixTime: march12
      }) as never
    )
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/rent_set 700 USD 2026-03',
        unixTime: march12
      }) as never
    )
    // Bob's free-text purchase goes through topic ingestion, not commands.
    await bot.handleUpdate(
      topicPurchaseUpdate({
        updateId: ++updateId,
        fromUserId: telegram.bob,
        fromName: 'Bob',
        text: 'Bought soap 30 USD',
        unixTime: march12
      }) as never
    )
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/utility_add electricity 120 USD',
        unixTime: march12
      }) as never
    )
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/statement 2026-03',
        unixTime: march12
      }) as never
    )
    // First statement: rent 700 + electricity 120 + soap 30 = 850 split three
    // ways, with Bob credited for his 30 USD purchase. Alice carries the
    // rounding remainder (+0.01).
    const firstStatement = replies.find((entry) => entry.startsWith('Statement for 2026-03'))
    assert.ok(firstStatement, 'First statement message was not emitted')
    const firstTotals = parseStatement(firstStatement)
    assert.equal(firstTotals.get('Alice'), '283.34')
    assert.equal(firstTotals.get('Bob'), '253.33')
    assert.equal(firstTotals.get('Carol'), '283.33')
    // Add a second utility and re-run the statement: totals must move by
    // exactly 30 USD / 3 = 10 USD per member (determinism check).
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/utility_add water 30 USD',
        unixTime: march12
      }) as never
    )
    await bot.handleUpdate(
      commandUpdate({
        updateId: ++updateId,
        fromUserId: telegram.admin,
        fromName: 'Alice',
        text: '/statement 2026-03',
        unixTime: march12
      }) as never
    )
    const secondStatement = replies.at(-1)
    assert.ok(secondStatement?.startsWith('Statement for 2026-03'), 'Second statement missing')
    const secondTotals = parseStatement(secondStatement ?? '')
    assert.equal(secondTotals.get('Alice'), '293.34')
    assert.equal(secondTotals.get('Bob'), '263.33')
    assert.equal(secondTotals.get('Carol'), '293.33')
    // Verify the ingestion side wrote exactly one parsed purchase row for Bob
    // (3000 minor units = 30.00 USD). Slash commands posted in the topic must
    // NOT have produced extra rows.
    const purchaseRows = await coreClient.db
      .select({
        status: schema.purchaseMessages.processingStatus,
        amountMinor: schema.purchaseMessages.parsedAmountMinor,
        senderMemberId: schema.purchaseMessages.senderMemberId
      })
      .from(schema.purchaseMessages)
      .where(eq(schema.purchaseMessages.householdId, ids.household))
    assert.equal(purchaseRows.length, 1, 'Expected one ingested purchase message')
    assert.equal(purchaseRows[0]?.status, 'parsed')
    assert.equal(purchaseRows[0]?.amountMinor, 3000n)
    assert.equal(purchaseRows[0]?.senderMemberId, ids.bob)
    console.log(
      'E2E smoke passed: purchase ingestion, utility updates, and statements are deterministic'
    )
  } finally {
    // Best-effort cleanup: deleting the household cascades to all smoke rows;
    // allSettled ensures one failed close does not skip the others.
    await Promise.allSettled([
      coreClient
        ? coreClient.db.delete(schema.households).where(eq(schema.households.id, ids.household))
        : undefined,
      coreClient?.queryClient.end({ timeout: 5 }),
      ingestionClient?.close(),
      financeService?.close()
    ])
  }
}
// Entry point: surface any failure on stderr and signal it to the shell via a
// non-zero exit code, without masking the original error.
await run().catch((error: unknown) => {
  console.error('E2E smoke failed', error)
  process.exitCode = 1
})

13
scripts/package.json Normal file
View File

@@ -0,0 +1,13 @@
{
"name": "@household/scripts",
"private": true,
"type": "module",
"scripts": {
"typecheck": "tsgo --project tsconfig.json --noEmit"
},
"devDependencies": {
"drizzle-orm": "^0.44.5",
"@household/config": "workspace:*",
"@household/db": "workspace:*"
}
}

9
scripts/tsconfig.json Normal file
View File

@@ -0,0 +1,9 @@
{
"extends": "../tsconfig.base.json",
"compilerOptions": {
"paths": {
"@household/*": ["../packages/*/src", "../apps/*/src"]
}
},
"include": ["**/*.ts", "../apps/bot/src/**/*.ts"]
}

View File

@@ -1,15 +1,38 @@
{
"files": [],
"references": [
{ "path": "./apps/bot" },
{ "path": "./apps/miniapp" },
{ "path": "./packages/domain" },
{ "path": "./packages/application" },
{ "path": "./packages/ports" },
{ "path": "./packages/contracts" },
{ "path": "./packages/observability" },
{ "path": "./packages/config" },
{ "path": "./packages/db" },
{ "path": "./packages/adapters-db" }
{
"path": "./apps/bot"
},
{
"path": "./apps/miniapp"
},
{
"path": "./packages/domain"
},
{
"path": "./packages/application"
},
{
"path": "./packages/ports"
},
{
"path": "./packages/contracts"
},
{
"path": "./packages/observability"
},
{
"path": "./packages/config"
},
{
"path": "./packages/db"
},
{
"path": "./packages/adapters-db"
},
{
"path": "./scripts"
}
]
}