Initial commit of akmon project
This commit is contained in:
1738
server/gateway-mqtt-node/package-lock.json
generated
Normal file
1738
server/gateway-mqtt-node/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
26
server/gateway-mqtt-node/package.json
Normal file
26
server/gateway-mqtt-node/package.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"name": "gateway-mqtt-node",
|
||||
"version": "0.1.0",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "node --env-file=.env --watch src/index.js",
|
||||
"start": "node --env-file=.env src/index.js",
|
||||
"worker": "node --env-file=.env --watch src/worker.js",
|
||||
"start:worker": "node --env-file=.env src/worker.js",
|
||||
"simulate:webhook": "node --env-file=.env scripts/simulate_webhook.js",
|
||||
"simulate:downlink": "node --env-file=.env scripts/simulate_downlink.js",
|
||||
"simulate:chat:downlink": "node --env-file=.env scripts/simulate_chat_downlink.js",
|
||||
"simulate:ack": "node --env-file=.env scripts/simulate_ack.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@supabase/supabase-js": "^2.88.0",
|
||||
"dotenv": "^16.4.5",
|
||||
"express": "^4.19.2",
|
||||
"mqtt": "^5.7.0",
|
||||
"kafkajs": "^2.2.4",
|
||||
"pino": "^9.3.2",
|
||||
"redis": "^4.6.14",
|
||||
"uuid": "^9.0.1"
|
||||
}
|
||||
}
|
||||
34
server/gateway-mqtt-node/scripts/simulate_ack.js
Normal file
34
server/gateway-mqtt-node/scripts/simulate_ack.js
Normal file
@@ -0,0 +1,34 @@
|
||||
// Simulate a device ACK: publish a JSON ack envelope (with a correlation id)
// to the configured ack topic so the gateway's ACK handler can be exercised.
import 'dotenv/config'
import mqtt from 'mqtt'
import { v4 as uuidv4 } from 'uuid'

const MQTT_URL = process.env.MQTT_URL
if (!MQTT_URL) throw new Error('Missing MQTT_URL')

const pattern = process.env.ACK_TOPIC_PATTERN || 'device/+/ack'
const target = process.env.SIM_ACK_TARGET // e.g. userId or deviceId to fill the '+'
const correlationId = process.env.SIM_CORRELATION_ID || uuidv4()

// Fill every '+' wildcard in the subscription pattern with the target (or 'test').
function buildTopic() {
  let filled = false
  const segments = pattern.split('/').map((segment) => {
    if (segment !== '+') return segment
    filled = true
    return target || 'test'
  })
  if (pattern.includes('+') && !filled) throw new Error('Pattern contains + but could not fill it')
  return segments.join('/')
}
const topic = buildTopic()

const payload = JSON.stringify({ correlation_id: correlationId, ok: true, t: Date.now() })

console.log('Publishing ACK', { topic, correlationId })
const client = mqtt.connect(MQTT_URL, { clientId: `ack-sim-${Math.random().toString(16).slice(2)}` })
client.on('connect', () => {
  client.publish(topic, payload, { qos: 1 }, (err) => {
    if (err) console.error('publish error', err)
    else console.log('ACK published')
    // Give the broker a moment to flush, then close and exit with a status code.
    setTimeout(() => client.end(true, () => process.exit(err ? 1 : 0)), 200)
  })
})
client.on('error', (e) => { console.error('mqtt error', e); process.exit(2) })
|
||||
43
server/gateway-mqtt-node/scripts/simulate_chat_downlink.js
Normal file
43
server/gateway-mqtt-node/scripts/simulate_chat_downlink.js
Normal file
@@ -0,0 +1,43 @@
|
||||
// Queue a chat downlink by inserting a 'pending' row into chat_mqtt_downlinks;
// the gateway's chat downlink poller picks it up and publishes it over MQTT.
import 'dotenv/config'
import { createClient } from '@supabase/supabase-js'
import { v4 as uuidv4 } from 'uuid'

async function main() {
  const SUPABASE_URL = process.env.SUPABASE_URL
  const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY
  if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) throw new Error('Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY')

  const conversationId = process.env.SIM_CHAT_CONVERSATION_ID
  if (!conversationId) throw new Error('SIM_CHAT_CONVERSATION_ID required')

  // Topic: explicit SIM_TOPIC wins; otherwise derive device/{user}/down from the target user.
  const targetUserId = process.env.SIM_TARGET_USER_ID || ''
  const topic = process.env.SIM_TOPIC || (targetUserId ? `device/${targetUserId}/down` : '')
  if (!topic) throw new Error('Provide SIM_TOPIC or SIM_TARGET_USER_ID to derive topic')

  const correlationId = process.env.SIM_CORRELATION_ID || uuidv4()
  // Payload: SIM_PAYLOAD (JSON) if provided, else a default ping carrying the correlation id.
  let body
  if (process.env.SIM_PAYLOAD) {
    body = JSON.parse(process.env.SIM_PAYLOAD)
  } else {
    body = { type: 'ping', t: Date.now(), correlation_id: correlationId }
  }
  const payload = typeof body === 'string' ? body : JSON.stringify(body)
  const qos = parseInt(process.env.SIM_QOS || '1', 10)
  const retain = /^true$/i.test(process.env.SIM_RETAIN || 'false')

  const supa = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, { auth: { autoRefreshToken: false, persistSession: false } })
  const row = {
    conversation_id: conversationId,
    target_user_id: targetUserId || null,
    topic,
    payload,
    payload_encoding: 'utf8',
    qos,
    retain,
    status: 'pending',
    scheduled_at: new Date().toISOString(),
    correlation_id: correlationId
  }
  const inserted = await supa.from('chat_mqtt_downlinks').insert(row).select('*').single()
  if (inserted.error) throw inserted.error
  console.log('Inserted chat downlink:', { id: inserted.data.id, correlation_id: correlationId, topic })
}

main().catch((e) => { console.error(e); process.exit(1) })
|
||||
38
server/gateway-mqtt-node/scripts/simulate_downlink.js
Normal file
38
server/gateway-mqtt-node/scripts/simulate_downlink.js
Normal file
@@ -0,0 +1,38 @@
|
||||
// Queue a generic device downlink by inserting a 'pending' row into mqtt_downlinks.
import 'dotenv/config'
import { createClient } from '@supabase/supabase-js'
import { v4 as uuidv4 } from 'uuid'

const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY
if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) throw new Error('Missing SUPABASE_* envs')

const supa = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, { auth: { autoRefreshToken: false, persistSession: false } })

// All knobs come from SIM_DOWNLINK_* env vars, with demo-friendly defaults.
const topic = process.env.SIM_DOWNLINK_TOPIC || 'device/demo-001/down'
const payload = process.env.SIM_DOWNLINK_PAYLOAD || JSON.stringify({ cmd: 'beep', duration_ms: 500 })
const payloadEncoding = process.env.SIM_DOWNLINK_ENCODING || 'json'
const qos = parseInt(process.env.SIM_DOWNLINK_QOS || '1', 10)
const retain = /^true$/i.test(process.env.SIM_DOWNLINK_RETAIN || 'false')

async function main() {
  const row = {
    id: uuidv4(),
    topic,
    payload,
    payload_encoding: payloadEncoding,
    qos,
    retain,
    status: 'pending',
    scheduled_at: new Date().toISOString(),
    created_by: null
  }
  const { data, error } = await supa.from('mqtt_downlinks').insert(row).select('id, topic, payload_encoding, qos, retain, status').single()
  if (error) {
    console.error('insert error:', error)
    process.exit(1)
  }
  console.log('inserted:', data)
}

main().catch((e) => { console.error(e); process.exit(1) })
|
||||
52
server/gateway-mqtt-node/scripts/simulate_webhook.js
Normal file
52
server/gateway-mqtt-node/scripts/simulate_webhook.js
Normal file
@@ -0,0 +1,52 @@
|
||||
// POST a fake EMQX-style webhook event to the local gateway's /webhooks/mqtt endpoint.
import 'dotenv/config'
import http from 'http'

const HTTP_PORT = parseInt(process.env.HTTP_PORT || '3000', 10)
const WEBHOOK_TOKEN = process.env.WEBHOOK_TOKEN || ''

const conversationId = process.env.SIM_CONVERSATION_ID || '00000000-0000-0000-0000-000000000000'
const senderId = process.env.SIM_SENDER_ID || '00000000-0000-0000-0000-000000000001'

// Chat envelope the gateway expects to find inside the webhook payload.
const envelope = {
  id: process.env.SIM_MESSAGE_ID || undefined,
  ts: new Date().toISOString(),
  type: 'chat.message',
  source: 'webhook.sim',
  conversation_id: conversationId,
  sender_id: senderId,
  content: process.env.SIM_CONTENT || 'hello from webhook',
  content_type: 'text',
  metadata: { sim: true }
}

const requestBody = JSON.stringify({
  event: 'message.publish',
  topic: `chat/send/${conversationId}`,
  // emulate raw string payload
  payload: JSON.stringify(envelope)
})

const requestOptions = {
  hostname: '127.0.0.1',
  port: HTTP_PORT,
  path: '/webhooks/mqtt',
  method: 'POST',
  headers: {
    'content-type': 'application/json',
    'content-length': Buffer.byteLength(requestBody),
    // Token header is attached only when a token is configured.
    ...(WEBHOOK_TOKEN ? { 'x-webhook-token': WEBHOOK_TOKEN } : {})
  }
}

const req = http.request(requestOptions, (res) => {
  const chunks = []
  res.on('data', (chunk) => chunks.push(chunk))
  res.on('end', () => {
    console.log('status:', res.statusCode)
    console.log('body :', chunks.join(''))
  })
})

req.on('error', (err) => console.error('request error:', err))
req.write(requestBody)
req.end()
|
||||
610
server/gateway-mqtt-node/src/index.js
Normal file
610
server/gateway-mqtt-node/src/index.js
Normal file
@@ -0,0 +1,610 @@
|
||||
import 'dotenv/config'
import { timingSafeEqual } from 'node:crypto'
import { createClient } from '@supabase/supabase-js'
import express from 'express'
import { Kafka } from 'kafkajs'
import mqtt from 'mqtt'
import pino from 'pino'
import { createClient as createRedisClient } from 'redis'
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
// Structured logger; level tunable via LOG_LEVEL (default 'info').
const log = pino({ level: process.env.LOG_LEVEL || 'info' })

// env
// --- MQTT broker connection ---
const MQTT_URL = process.env.MQTT_URL
// Random suffix keeps concurrently-running gateways from kicking each other off the broker.
const MQTT_CLIENT_ID = process.env.MQTT_CLIENT_ID || `gateway-${Math.random().toString(16).slice(2)}`
const MQTT_USERNAME = process.env.MQTT_USERNAME
const MQTT_PASSWORD = process.env.MQTT_PASSWORD
// --- Supabase (service-role key; server-side use) ---
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY
// When true, successfully-persisted messages are echoed on chat/recv/{conversationId}.
const MQTT_ECHO_CONFIRMED = /^true$/i.test(process.env.MQTT_ECHO_CONFIRMED || 'true')
// optional: jwt verify via external gateway (placeholder) or envelope token
const JWT_JWKS_URL = process.env.JWT_JWKS_URL
const JWT_AUDIENCE = process.env.JWT_AUDIENCE
const JWT_ISSUER = process.env.JWT_ISSUER
// optional: redis for idempotency
const REDIS_URL = process.env.REDIS_URL
const IDEMPOTENCY_TTL_SEC = parseInt(process.env.IDEMPOTENCY_TTL_SEC || '86400', 10)
// optional: kafka
const KAFKA_BROKERS = (process.env.KAFKA_BROKERS || '').split(',').filter(Boolean)
const KAFKA_CLIENT_ID = process.env.KAFKA_CLIENT_ID || MQTT_CLIENT_ID
const KAFKA_TOPIC_INBOUND = process.env.KAFKA_TOPIC_INBOUND || 'chat.inbound'
// reporting
const GATEWAY_NAME = process.env.GATEWAY_NAME || 'mqtt-gateway'
const GATEWAY_REGION = process.env.GATEWAY_REGION || ''
const GATEWAY_VERSION = process.env.GATEWAY_VERSION || '0.1.0'
const HEARTBEAT_INTERVAL_MS = parseInt(process.env.HEARTBEAT_INTERVAL_MS || '15000', 10)
const ENABLE_HEARTBEAT = /^true$/i.test(process.env.ENABLE_HEARTBEAT || 'true')

// counters for reporting (declare early to avoid TDZ issues)
// Reset after each successful heartbeat insert, so values are per-interval deltas.
const counters = {
  msgs_in: 0,
  msgs_out: 0,
  msgs_dropped: 0,
  errors: 0,
  acl_denied: 0,
  kafka_produced: 0
}

// Fail fast on required configuration.
if (!MQTT_URL) throw new Error('Missing MQTT_URL')
if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) throw new Error('Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY')

// Server-side client: no session persistence/refresh needed.
const supa = createClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, {
  auth: { autoRefreshToken: false, persistSession: false }
})

// topic layout
const topicSendPrefix = 'chat/send/'
const topicRecvPrefix = 'chat/recv/'
// optional: standardized device downlink prefix example: device/{deviceId}/down
const ACK_ENABLE = /^true$/i.test(process.env.ACK_ENABLE || 'true')
const ACK_TOPIC_PATTERN = process.env.ACK_TOPIC_PATTERN || 'device/+/ack'

// ------------------ Supabase table names (configurable for prefixes) ------------------
const TBL_CHAT_PARTICIPANTS = process.env.TBL_CHAT_PARTICIPANTS || 'chat_participants'
const TBL_CHAT_MESSAGES = process.env.TBL_CHAT_MESSAGES || 'chat_messages'
const TBL_CHAT_MQTT_DOWNLINKS = process.env.TBL_CHAT_MQTT_DOWNLINKS || 'chat_mqtt_downlinks'
// NOTE(review): this default points the generic downlink worker at the chat table,
// while scripts/simulate_downlink.js inserts into 'mqtt_downlinks' — confirm whether
// the intended default is 'mqtt_downlinks'.
const TBL_MQTT_DOWNLINKS = process.env.TBL_MQTT_DOWNLINKS || 'chat_mqtt_downlinks'
const TBL_GATEWAY_NODES = process.env.TBL_GATEWAY_NODES || 'gateway_nodes'
const TBL_GATEWAY_HEARTBEATS = process.env.TBL_GATEWAY_HEARTBEATS || 'gateway_heartbeats'
|
||||
|
||||
// ACL check: a user may post to a conversation only if a chat_participants row
// links them to it. Throws on query failure.
async function isParticipant(conversationId, userId) {
  const result = await supa
    .from(TBL_CHAT_PARTICIPANTS)
    .select('id')
    .eq('conversation_id', conversationId)
    .eq('user_id', userId)
    .limit(1)
  if (result.error) throw result.error
  const rows = result.data
  return Array.isArray(rows) && rows.length > 0
}
|
||||
|
||||
// Persist an inbound envelope as a chat_messages row, generating an id when
// the envelope does not carry one. Returns the raw supabase { data, error, status }.
async function persistMessage(evt) {
  const row = {
    id: evt.id || uuidv4(),
    conversation_id: evt.conversation_id,
    sender_id: evt.sender_id,
    content: evt.content,
    content_type: evt.content_type || 'text',
    metadata: evt.metadata || null
  }
  const result = await supa
    .from(TBL_CHAT_MESSAGES)
    .insert(row)
    .select('*')
    .single()
  return { data: result.data, error: result.error, status: result.status }
}
|
||||
|
||||
// Decode a UTF-8 JSON envelope from a raw MQTT payload buffer; null when malformed.
function parseEnvelope(buf) {
  try {
    return JSON.parse(buf.toString('utf8'))
  } catch {
    return null
  }
}
|
||||
|
||||
// Topic on which confirmed messages for a conversation are echoed back.
function recvTopicFor(conversationId) {
  return topicRecvPrefix + conversationId
}
|
||||
|
||||
// idempotency
// Optional Redis connection used to de-duplicate message ids (see isDuplicate).
let redis
if (REDIS_URL) {
  redis = createRedisClient({ url: REDIS_URL })
  redis.on('error', (e) => log.error({ e }, 'redis error'))
  // Background connect; a failure is only logged, and isDuplicate then reports "not duplicate".
  redis.connect().then(() => log.info('redis connected')).catch((e) => log.error({ e }, 'redis connect fail'))
}
|
||||
// True when this message id has been seen within IDEMPOTENCY_TTL_SEC.
// Uses SET NX as an atomic test-and-set; without redis (or an id) nothing is a duplicate.
async function isDuplicate(messageId) {
  if (!redis || !messageId) return false
  const outcome = await redis.set(`chat:msg:${messageId}`, '1', { NX: true, EX: IDEMPOTENCY_TTL_SEC })
  return outcome !== 'OK'
}
|
||||
|
||||
// kafka producer
// Optional: when brokers are configured, inbound chat envelopes are produced to
// KAFKA_TOPIC_INBOUND instead of being persisted directly (see processMessage).
let kafka, producer
if (KAFKA_BROKERS.length > 0) {
  kafka = new Kafka({ clientId: KAFKA_CLIENT_ID, brokers: KAFKA_BROKERS })
  producer = kafka.producer()
  // Background connect; connect failure is only logged here.
  producer.connect().then(() => log.info('kafka connected')).catch((e) => log.error({ e }, 'kafka connect fail'))
}
|
||||
// Produce one inbound envelope to Kafka, keyed by conversation id so a
// conversation's messages stay ordered within a partition.
// Returns false when no producer is configured or the send fails (logged).
async function produceInbound(env) {
  if (!producer) return false
  const record = {
    topic: KAFKA_TOPIC_INBOUND,
    messages: [{ key: env.conversation_id, value: JSON.stringify(env) }]
  }
  try {
    await producer.send(record)
    return true
  } catch (e) {
    log.error({ e }, 'kafka produce failed')
    return false
  }
}
|
||||
|
||||
// Placeholder JWT validation: accepts any non-empty token and never resolves a
// subject. TODO: real JWKS verification (e.g. jose) using JWT_JWKS_URL.
async function validateTokenAndExtractUser(token) {
  const valid = Boolean(token)
  return { valid, sub: null }
}
|
||||
|
||||
// Core inbound pipeline: validate envelope -> optional JWT check -> idempotency ->
// ACL -> produce to Kafka (or persist directly) -> optional MQTT echo.
// Returns { ok, id?, via?, reason?, error? } and never throws; all failures are
// counted in `counters` for heartbeat reporting.
// `forceDirectPersist` bypasses Kafka (used by the HTTP webhook path, which
// needs a synchronous outcome).
async function processMessage(conversationId, env, { forceDirectPersist = false } = {}) {
  if (!env) { counters.msgs_dropped++; log.warn({ conversationId }, 'invalid env'); return { ok: false, reason: 'invalid_env' } }
  if (!env.sender_id) { counters.msgs_dropped++; log.warn({ conversationId }, 'missing sender_id'); return { ok: false, reason: 'missing_sender' } }
  counters.msgs_in++

  // Optional token check (placeholder implementation — see validateTokenAndExtractUser).
  if (JWT_JWKS_URL && (env.token || MQTT_USERNAME)) {
    try {
      const { valid } = await validateTokenAndExtractUser(env.token)
      if (!valid) { counters.msgs_dropped++; log.warn('jwt invalid'); return { ok: false, reason: 'jwt_invalid' } }
    } catch (e) {
      counters.errors++; counters.msgs_dropped++
      log.error({ e }, 'jwt validate failed')
      return { ok: false, reason: 'jwt_error' }
    }
  }

  // Idempotency: drop ids already seen (no-op when redis/id absent).
  const messageId = env.id || null
  if (await isDuplicate(messageId)) {
    counters.msgs_dropped++
    log.info({ id: messageId }, 'duplicate dropped')
    return { ok: false, reason: 'duplicate' }
  }

  // ACL: sender must be a participant of the conversation.
  try {
    const ok = await isParticipant(conversationId, env.sender_id)
    if (!ok) {
      counters.acl_denied++; counters.msgs_dropped++
      log.warn({ conversationId, userId: env.sender_id }, 'not participant, drop')
      return { ok: false, reason: 'acl_denied' }
    }
  } catch (e) {
    counters.errors++
    log.error({ e }, 'acl check failed')
    return { ok: false, reason: 'acl_error' }
  }

  try {
    // Ensure the envelope has an id and the topic-derived conversation id.
    const fullEnv = { ...env, id: messageId || uuidv4(), conversation_id: conversationId }
    const useKafka = !forceDirectPersist && !!producer
    if (useKafka) {
      const ok = await produceInbound(fullEnv)
      if (ok) {
        counters.kafka_produced++
        log.info({ id: fullEnv.id, conversationId }, 'produced to kafka')
        return { ok: true, id: fullEnv.id, via: 'kafka' }
      }
      // if produce failed, fallthrough to direct persist
    }

    const { data, error } = await persistMessage(fullEnv)
    if (error) {
      counters.errors++
      log.error({ error }, 'persist failed')
      return { ok: false, reason: 'persist_failed', error }
    }
    log.info({ id: data.id, conversationId }, 'persist ok')
    counters.msgs_out++
    // Echo the confirmed (DB-shaped) message back on chat/recv/{conversationId};
    // fire-and-forget, publish errors are only counted/logged.
    if (MQTT_ECHO_CONFIRMED) {
      const out = JSON.stringify(data)
      client.publish(recvTopicFor(conversationId), out, { qos: 1 }, (err) => {
        if (err) { counters.errors++; log.error({ err }, 'echo publish failed') }
      })
    }
    return { ok: true, id: data.id, via: 'direct' }
  } catch (e) {
    counters.errors++
    log.error({ e }, 'persist exception')
    return { ok: false, reason: 'exception', error: e }
  }
}
|
||||
|
||||
// Route an MQTT chat/send/{conversationId} message into the shared pipeline.
async function handleSend(topic, msg) {
  const conversationId = topic.slice(topicSendPrefix.length)
  const envelope = parseEnvelope(msg)
  return processMessage(conversationId, envelope, {})
}
|
||||
|
||||
// MQTT client: auto-reconnects every 1.5s; clean session, so subscriptions are
// re-established in the 'connect' handler after every (re)connect.
const client = mqtt.connect(MQTT_URL, {
  clientId: MQTT_CLIENT_ID,
  username: MQTT_USERNAME,
  password: MQTT_PASSWORD,
  keepalive: 30,
  reconnectPeriod: 1500,
  clean: true
})

client.on('connect', () => {
  log.info({ MQTT_URL }, 'connected')
  // Inbound chat messages.
  client.subscribe(`${topicSendPrefix}#`, { qos: 1 }, (err) => {
    if (err) log.error({ err }, 'subscribe failed')
    else log.info('subscribed chat/send/#')
  })
  // Device ACKs (optional).
  if (ACK_ENABLE) {
    client.subscribe(ACK_TOPIC_PATTERN, { qos: 1 }, (err) => {
      if (err) log.error({ err }, 'subscribe ack failed')
      else log.info({ pattern: ACK_TOPIC_PATTERN }, 'subscribed ack topic')
    })
  }
})

client.on('reconnect', () => log.warn('reconnect'))
client.on('close', () => log.warn('close'))
client.on('error', (err) => log.error({ err }, 'mqtt error'))
// Dispatch incoming messages: ACK topics first, then chat/send/*; anything else is ignored.
client.on('message', async (topic, payload) => {
  try {
    if (ACK_ENABLE) {
      // naive match: if topic matches pattern device/+/ack
      const parts = topic.split('/')
      const ackParts = (ACK_TOPIC_PATTERN || '').split('/')
      const maybeAck = ackParts.length === parts.length && ackParts.every((p, i) => p === '+' || p === parts[i])
      if (maybeAck) return await handleAck(topic, payload)
    }
    if (topic.startsWith(topicSendPrefix)) return await handleSend(topic, payload)
  } catch (e) {
    counters.errors++
    log.error({ e }, 'handle message failed')
  }
})

// Graceful shutdown: force-close the MQTT connection, then exit.
process.on('SIGINT', () => { log.info('bye'); client.end(true, () => process.exit(0)) })
|
||||
|
||||
// ------------------ Reporting (gateway_nodes + heartbeats) ------------------
// Row id of this gateway in gateway_nodes; set by upsertGatewayNode, used by sendHeartbeat.
let gatewayId = null

// Register (or refresh) this gateway's row in gateway_nodes, keyed by mqtt_client_id.
// Sets the module-level `gatewayId` and returns it; returns null on failure (logged).
async function upsertGatewayNode() {
  try {
    // Look up an existing row for this client id.
    const { data: existed, error: qErr } = await supa
      .from(TBL_GATEWAY_NODES)
      .select('id')
      .eq('mqtt_client_id', MQTT_CLIENT_ID)
      .maybeSingle()
    if (qErr) throw qErr
    if (existed && existed.id) {
      // Refresh mutable metadata on the existing row.
      gatewayId = existed.id
      await supa.from(TBL_GATEWAY_NODES).update({
        name: GATEWAY_NAME,
        version: GATEWAY_VERSION,
        region: GATEWAY_REGION,
        tags: { hostname: process.env.HOSTNAME || null }
      }).eq('id', gatewayId)
      return gatewayId
    } else {
      // First run for this client id: create the row.
      const { data: created, error: iErr } = await supa
        .from(TBL_GATEWAY_NODES)
        .insert({
          name: GATEWAY_NAME,
          mqtt_client_id: MQTT_CLIENT_ID,
          version: GATEWAY_VERSION,
          region: GATEWAY_REGION,
          tags: { hostname: process.env.HOSTNAME || null }
        })
        .select('id')
        .single()
      if (iErr) throw iErr
      gatewayId = created.id
      return gatewayId
    }
  } catch (e) {
    log.error({ e }, 'upsert gateway node failed')
    return null
  }
}
|
||||
|
||||
// Convert a byte count to whole megabytes (rounded); null when the input is
// missing or not a finite number.
// Fix: the original `x ? … : null` also mapped a legitimate 0-byte reading to null.
function bytesToMb(x) {
  if (x == null || !Number.isFinite(x)) return null
  return Math.round(x / 1024 / 1024)
}
|
||||
|
||||
// Insert one gateway_heartbeats row with liveness flags and per-interval counters.
// Counters are reset only after a successful insert, so a failed push carries
// its deltas into the next interval.
async function sendHeartbeat() {
  if (!ENABLE_HEARTBEAT) return
  try {
    // Lazily (re)register the gateway row if the initial registration failed.
    if (!gatewayId) await upsertGatewayNode()
    if (!gatewayId) return
    const mem = process.memoryUsage()
    const payload = {
      gateway_id: gatewayId,
      uptime_sec: Math.floor(process.uptime()),
      mem_rss_mb: bytesToMb(mem.rss),
      heap_used_mb: bytesToMb(mem.heapUsed),
      mqtt_connected: client.connected,
      kafka_connected: !!producer, // producer existence, not a live connection probe
      redis_connected: !!redis && redis.isOpen,
      msgs_in: counters.msgs_in,
      msgs_out: counters.msgs_out,
      msgs_dropped: counters.msgs_dropped,
      errors: counters.errors,
      acl_denied: counters.acl_denied,
      kafka_produced: counters.kafka_produced,
      extra: { env: process.env.NODE_ENV || 'dev' }
    }
    const { error } = await supa.from(TBL_GATEWAY_HEARTBEATS).insert(payload)
    if (error) {
      log.error({ error }, 'heartbeat insert failed')
    } else {
      // reset counters after successful push
      counters.msgs_in = 0
      counters.msgs_out = 0
      counters.msgs_dropped = 0
      counters.errors = 0
      counters.acl_denied = 0
      counters.kafka_produced = 0
    }
  } catch (e) {
    log.error({ e }, 'heartbeat exception')
  }
}

// Register once at startup, then report on a fixed interval.
if (ENABLE_HEARTBEAT) {
  upsertGatewayNode().then(() => {
    setInterval(sendHeartbeat, HEARTBEAT_INTERVAL_MS)
    log.info({ intervalMs: HEARTBEAT_INTERVAL_MS }, 'heartbeat scheduled')
  })
}
|
||||
|
||||
// ------------------ HTTP Webhook (for MQTT broker webhooks) ------------------
const HTTP_PORT = parseInt(process.env.HTTP_PORT || '3000', 10)
// Shared secret for webhook auth; empty string disables the check (see authorizeWebhook).
const WEBHOOK_TOKEN = process.env.WEBHOOK_TOKEN || ''

const app = express()
// Broker webhook bodies are JSON; cap at 1 MB.
app.use(express.json({ limit: '1mb' }))
|
||||
|
||||
// Express middleware guarding webhook routes with the shared WEBHOOK_TOKEN.
// Accepts the token from `x-webhook-token` or `x-api-key`; the check is disabled
// when no token is configured.
// Fix: compare with crypto.timingSafeEqual instead of `===` so the secret cannot
// be recovered byte-by-byte via response timing.
function authorizeWebhook(req, res, next) {
  if (!WEBHOOK_TOKEN) return next()
  const token = req.headers['x-webhook-token'] || req.headers['x-api-key']
  if (typeof token === 'string') {
    const provided = Buffer.from(token)
    const expected = Buffer.from(WEBHOOK_TOKEN)
    // timingSafeEqual requires equal lengths; a length mismatch is a reject anyway.
    if (provided.length === expected.length && timingSafeEqual(provided, expected)) return next()
  }
  return res.status(401).json({ error: 'unauthorized' })
}
|
||||
|
||||
// Extract the raw MQTT payload bytes from a broker webhook body.
// Supports EMQX-style base64 fields as well as plain string payloads, both at
// the top level and nested under `message`. Returns null when nothing usable is found.
function decodeWebhookPayload(body) {
  // Explicit *_base64 fields win.
  if (body.payload_base64) return Buffer.from(body.payload_base64, 'base64')
  const nested = body.message
  if (nested && nested.payload_base64) return Buffer.from(nested.payload_base64, 'base64')

  // Otherwise a string payload, optionally flagged base64 via an encoding field.
  const encoding =
    body.encoding ||
    body.payload_encoding ||
    (nested && (nested.encoding || nested.payload_encoding))
  if (typeof body.payload === 'string') {
    const isBase64 = encoding && String(encoding).toLowerCase() === 'base64'
    return Buffer.from(body.payload, isBase64 ? 'base64' : 'utf8')
  }
  if (nested && typeof nested.payload === 'string') {
    const nestedEncoding = (nested.encoding || nested.payload_encoding || '').toLowerCase()
    return Buffer.from(nested.payload, nestedEncoding === 'base64' ? 'base64' : 'utf8')
  }
  return null
}
|
||||
|
||||
// Ingest broker webhook events (e.g. EMQX message.publish) into the chat pipeline.
// Always persists directly (forceDirectPersist) because the webhook caller expects
// a synchronous outcome.
app.post('/webhooks/mqtt', authorizeWebhook, async (req, res) => {
  try {
    const evt = req.body || {}
    const event = evt.event || evt.action || 'message.publish'
    const topic = evt.topic || (evt.message && evt.message.topic)
    if (!topic) return res.status(400).json({ error: 'missing topic' })

    if (!topic.startsWith(topicSendPrefix)) {
      // Not our business path; just 200 OK so webhook pipeline continues.
      return res.json({ ok: true, ignored: true })
    }

    const buf = decodeWebhookPayload(evt)
    if (!buf) return res.status(400).json({ error: 'missing payload' })
    const env = parseEnvelope(buf)
    const conversationId = topic.slice(topicSendPrefix.length)
    const result = await processMessage(conversationId, env, { forceDirectPersist: true })
    // 202: accepted but not applied — the drop reason is in the response body.
    if (!result.ok) return res.status(202).json(result)
    return res.json({ ok: true, id: result.id, via: result.via, event })
  } catch (e) {
    counters.errors++
    log.error({ e }, 'webhook handler failed')
    return res.status(500).json({ error: 'internal_error' })
  }
})
|
||||
|
||||
// Liveness probe.
app.get('/healthz', (req, res) => res.json({ ok: true }))
app.listen(HTTP_PORT, () => log.info({ HTTP_PORT }, 'http webhook listening'))

// ------------------ Downlink via Supabase Realtime (outline) ------------------
// Runnable minimal worker: polling + claim via scheduled_at bump, simple backoff
const DOWNLINK_ENABLE = /^true$/i.test(process.env.DOWNLINK_ENABLE || 'true')
const DOWNLINK_POLL_INTERVAL_MS = parseInt(process.env.DOWNLINK_POLL_INTERVAL_MS || '1000', 10)
const DOWNLINK_BATCH = parseInt(process.env.DOWNLINK_BATCH || '5', 10)
// How long a claim (scheduled_at bump) keeps a row away from other workers.
const DOWNLINK_CLAIM_MS = parseInt(process.env.DOWNLINK_CLAIM_MS || '5000', 10)
const DOWNLINK_BACKOFF_BASE_MS = parseInt(process.env.DOWNLINK_BACKOFF_BASE_MS || '2000', 10)
const DOWNLINK_BACKOFF_MAX_MS = parseInt(process.env.DOWNLINK_BACKOFF_MAX_MS || '60000', 10)
||||
|
||||
// Turn a downlink row's stored payload into the Buffer to publish.
// 'json' (and any other non-base64 encoding) is sent as utf8 text; the device
// side parses it itself.
function decodeDownlinkPayload(row) {
  const encoding = (row.payload_encoding || 'utf8').toLowerCase()
  return Buffer.from(row.payload, encoding === 'base64' ? 'base64' : 'utf8')
}
|
||||
|
||||
// Optimistically claim a pending downlink row by bumping scheduled_at into the future.
// The status/scheduled_at filters make the update conditional, so when several
// workers race, only one gets a row back; the rest see no data and skip.
async function claimRow(row) {
  const nowIso = new Date().toISOString()
  const claimUntil = new Date(Date.now() + DOWNLINK_CLAIM_MS).toISOString()
  const { data, error } = await supa
    .from(TBL_MQTT_DOWNLINKS)
    .update({ scheduled_at: claimUntil })
    .eq('id', row.id)
    .eq('status', 'pending')
    .lte('scheduled_at', nowIso)
    .select('id')
    .single()
  // Any error (including "no row matched") means the claim failed.
  if (error) return false
  return !!data
}
|
||||
|
||||
// Exponential retry backoff: base * 2^(attempt-1), capped at the configured max.
function calcBackoffMs(attempt) {
  const exponent = Math.max(0, attempt - 1)
  return Math.min(DOWNLINK_BACKOFF_BASE_MS * 2 ** exponent, DOWNLINK_BACKOFF_MAX_MS)
}
|
||||
|
||||
// Publish one downlink row to MQTT after claiming it, then mark it 'sent'.
// On publish failure the row stays 'pending' with retry_count bumped and
// scheduled_at pushed out by an exponential backoff.
async function handleDownlinkRow(row) {
  try {
    const claimed = await claimRow(row)
    if (!claimed) return // another worker took it or not ready
    const buf = decodeDownlinkPayload(row)
    // Wrap the callback-style publish so errors route into the catch below.
    await new Promise((resolve, reject) => client.publish(row.topic, buf, { qos: row.qos || 1, retain: !!row.retain }, (err) => err ? reject(err) : resolve()))
    await supa.from(TBL_MQTT_DOWNLINKS).update({ status: 'sent', sent_at: new Date().toISOString(), last_error: null }).eq('id', row.id)
  } catch (e) {
    log.error({ e, id: row.id }, 'downlink publish failed')
    const attempt = (row.retry_count || 0) + 1
    const delay = calcBackoffMs(attempt)
    // Defer the next attempt; the row remains 'pending'.
    await supa
      .from(TBL_MQTT_DOWNLINKS)
      .update({ last_error: String(e), retry_count: attempt, scheduled_at: new Date(Date.now() + delay).toISOString() })
      .eq('id', row.id)
  }
}
|
||||
|
||||
// One poll cycle: fetch up to DOWNLINK_BATCH due pending rows (oldest first)
// and process them sequentially.
async function pollDownlinksOnce() {
  const nowIso = new Date().toISOString()
  const { data, error } = await supa
    .from(TBL_MQTT_DOWNLINKS)
    .select('*')
    .eq('status', 'pending')
    .lte('scheduled_at', nowIso)
    .order('scheduled_at', { ascending: true })
    .limit(DOWNLINK_BATCH)
  if (error) {
    log.error({ error }, 'downlink query failed')
    return
  }
  if (!Array.isArray(data) || data.length === 0) return
  for (const row of data) {
    // Rows missing the essentials are terminally failed rather than retried.
    if (!row.topic || !row.payload) {
      await supa.from(TBL_MQTT_DOWNLINKS).update({ status: 'failed', last_error: 'missing topic/payload' }).eq('id', row.id)
      continue
    }
    await handleDownlinkRow(row)
  }
}

// Start the generic downlink poller.
if (DOWNLINK_ENABLE) {
  setInterval(pollDownlinksOnce, DOWNLINK_POLL_INTERVAL_MS)
  log.info({ intervalMs: DOWNLINK_POLL_INTERVAL_MS, batch: DOWNLINK_BATCH }, 'downlink poller started')
}
|
||||
|
||||
// ------------------ ACK handling ------------------
// Parse an ack payload: JSON when possible, otherwise wrap the raw text.
function parseAckPayload(buf) {
  const text = buf.toString('utf8')
  try {
    return JSON.parse(text)
  } catch {
    return { text }
  }
}
|
||||
|
||||
// True when s is a canonical 8-4-4-4-12 hex UUID string (case-insensitive).
// Fix: the previous /^[0-9a-fA-F-]{36}$/ accepted ANY 36 characters drawn from
// hex digits and dashes (e.g. 36 dashes), not just well-formed UUIDs.
function looksLikeUuid(s) {
  return typeof s === 'string' &&
    /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(s)
}
|
||||
|
||||
// Mark a downlink row 'acked' in the given table, matching first on the primary
// id and then on correlation_id. Only rows still 'pending' or 'sent' are
// eligible; when a topic is given, it must also match. Returns true if a row
// was updated.
async function markAckInTable(table, ackId, topic) {
  const nowIso = new Date().toISOString()
  // try id match
  let q = supa.from(table).update({ status: 'acked', ack_at: nowIso }).eq('id', ackId).in('status', ['pending','sent'])
  if (topic) q = q.eq('topic', topic)
  const r1 = await q.select('id')
  if (!r1.error && Array.isArray(r1.data) && r1.data.length > 0) return true
  // try correlation_id match
  let q2 = supa.from(table).update({ status: 'acked', ack_at: nowIso }).eq('correlation_id', ackId).in('status', ['pending','sent'])
  if (topic) q2 = q2.eq('topic', topic)
  const r2 = await q2.select('id')
  if (!r2.error && Array.isArray(r2.data) && r2.data.length > 0) return true
  return false
}
|
||||
|
||||
// Apply a device ACK: extract a UUID id from the payload and ack the matching
// downlink row — generic table first, then the chat table.
async function handleAck(topic, buf) {
  const obj = parseAckPayload(buf)
  // Accept several id field spellings, or a bare UUID as the whole text payload.
  const ackId = obj.correlation_id || obj.id || obj.message_id || (typeof obj.text === 'string' ? obj.text.trim() : null)
  if (!looksLikeUuid(ackId)) {
    log.warn({ topic, ackId }, 'ack without uuid id, ignored')
    return
  }
  let ok = await markAckInTable(TBL_MQTT_DOWNLINKS, ackId, topic)
  if (!ok) ok = await markAckInTable(TBL_CHAT_MQTT_DOWNLINKS, ackId, undefined) // chat rows may not have a topic set
  if (ok) log.info({ topic, ackId }, 'ack applied')
  else log.warn({ topic, ackId }, 'ack not matched')
}
|
||||
|
||||
// ------------------ Chat downlink worker (polling) ------------------
// Separate poller for chat_mqtt_downlinks rows (per-user chat deliveries).
const CHAT_DOWNLINK_ENABLE = /^true$/i.test(process.env.CHAT_DOWNLINK_ENABLE || 'true')
const CHAT_DOWNLINK_POLL_INTERVAL_MS = parseInt(process.env.CHAT_DOWNLINK_POLL_INTERVAL_MS || '1500', 10)
const CHAT_DOWNLINK_BATCH = parseInt(process.env.CHAT_DOWNLINK_BATCH || '5', 10)
|
||||
|
||||
// Resolve the MQTT topic for a chat downlink row: an explicit `topic`
// column wins, otherwise the topic is derived from the target user id.
// Returns null when neither is available.
function deriveChatDownlinkTopic(row) {
  if (row.topic) return row.topic
  return row.target_user_id ? `device/${row.target_user_id}/down` : null
}
|
||||
|
||||
// One polling pass over the chat downlink table: fetch due pending rows,
// claim each row by pushing scheduled_at into the future, publish it over
// MQTT and mark it 'sent'; on failure record the error and reschedule the
// row with exponential backoff.
async function pollChatDownlinksOnce() {
const nowIso = new Date().toISOString()
// Oldest-first batch of rows that are still pending and already due.
const { data, error } = await supa
.from(TBL_CHAT_MQTT_DOWNLINKS)
.select('*')
.eq('status', 'pending')
.lte('scheduled_at', nowIso)
.order('scheduled_at', { ascending: true })
.limit(CHAT_DOWNLINK_BATCH)
if (error) {
log.error({ error }, 'chat downlink query failed')
return
}
if (!Array.isArray(data) || data.length === 0) return
for (const row of data) {
const topic = deriveChatDownlinkTopic(row)
if (!topic || !row.payload) {
// Unroutable or empty rows are failed permanently so they stop polling.
await supa.from(TBL_CHAT_MQTT_DOWNLINKS).update({ status: 'failed', last_error: 'missing topic/payload' }).eq('id', row.id)
continue
}
try {
// claim via scheduled_at bump to avoid duplicates
// (the conditional update only succeeds for one competing worker; a
// loser observes claimed.error and skips the row)
const claimed = await supa
.from(TBL_CHAT_MQTT_DOWNLINKS)
.update({ scheduled_at: new Date(Date.now() + DOWNLINK_CLAIM_MS).toISOString() })
.eq('id', row.id)
.eq('status', 'pending')
.lte('scheduled_at', nowIso)
.select('id')
.single()
if (claimed.error) continue
const buf = decodeDownlinkPayload(row)
// QoS defaults to 1 when the row does not specify one.
await new Promise((resolve, reject) => client.publish(topic, buf, { qos: row.qos || 1, retain: !!row.retain }, (err) => err ? reject(err) : resolve()))
await supa.from(TBL_CHAT_MQTT_DOWNLINKS).update({ status: 'sent', sent_at: new Date().toISOString(), last_error: null }).eq('id', row.id)
} catch (e) {
// Publish (or payload decode) failed: bump the retry counter and push
// the row back into the future; it stays 'pending' and will be retried.
const attempt = (row.retry_count || 0) + 1
const delay = calcBackoffMs(attempt)
await supa
.from(TBL_CHAT_MQTT_DOWNLINKS)
.update({ last_error: String(e), retry_count: attempt, scheduled_at: new Date(Date.now() + delay).toISOString() })
.eq('id', row.id)
log.error({ e, id: row.id }, 'chat downlink publish failed')
}
}
}
|
||||
|
||||
// Start the poller. setInterval does not await the async poll, so a slow
// pass can overlap the next tick; the conditional per-row claim inside
// pollChatDownlinksOnce prevents duplicate publishes in that case.
if (CHAT_DOWNLINK_ENABLE) {
setInterval(pollChatDownlinksOnce, CHAT_DOWNLINK_POLL_INTERVAL_MS)
log.info({ intervalMs: CHAT_DOWNLINK_POLL_INTERVAL_MS, batch: CHAT_DOWNLINK_BATCH }, 'chat downlink poller started')
}
|
||||
66
server/gateway-mqtt-node/src/worker.js
Normal file
66
server/gateway-mqtt-node/src/worker.js
Normal file
@@ -0,0 +1,66 @@
|
||||
import 'dotenv/config'
|
||||
import pino from 'pino'
|
||||
import { Kafka } from 'kafkajs'
|
||||
import { createClient as createSupabaseClient } from '@supabase/supabase-js'
|
||||
|
||||
const log = pino({ level: process.env.LOG_LEVEL || 'info' })

// Required configuration: Supabase credentials and at least one Kafka broker.
const SUPABASE_URL = process.env.SUPABASE_URL
const SUPABASE_SERVICE_ROLE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY
// Comma-separated broker list, e.g. "host1:9092,host2:9092".
const KAFKA_BROKERS = (process.env.KAFKA_BROKERS || '').split(',').filter(Boolean)
const KAFKA_CLIENT_ID = process.env.KAFKA_CLIENT_ID || 'persist-worker'
const KAFKA_TOPIC_INBOUND = process.env.KAFKA_TOPIC_INBOUND || 'chat.inbound'

// Fail fast at startup rather than at first use.
if (!SUPABASE_URL || !SUPABASE_SERVICE_ROLE_KEY) throw new Error('Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY')
if (KAFKA_BROKERS.length === 0) throw new Error('KAFKA_BROKERS is required for worker')

// Service-role client for server-side use: no user session, no token refresh.
const supa = createSupabaseClient(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, {
auth: { autoRefreshToken: false, persistSession: false }
})
|
||||
|
||||
// Insert one inbound chat event into chat_messages and return the stored
// row. content_type defaults to 'text' and metadata to null when absent.
// Returns the raw Supabase { data, error, status } triple so the caller
// decides how to handle failures.
async function persistMessage(evt) {
  const { id, conversation_id, sender_id, content } = evt
  const row = {
    id,
    conversation_id,
    sender_id,
    content,
    content_type: evt.content_type || 'text',
    metadata: evt.metadata || null
  }
  const { data, error, status } = await supa
    .from('chat_messages')
    .insert(row)
    .select('*')
    .single()
  return { data, error, status }
}
|
||||
|
||||
// Kafka consumer: each worker instance joins the same consumer group so
// topic partitions are balanced across worker replicas.
const kafka = new Kafka({ clientId: KAFKA_CLIENT_ID, brokers: KAFKA_BROKERS })
const consumer = kafka.consumer({ groupId: `${KAFKA_CLIENT_ID}-group` })
|
||||
|
||||
// Consume inbound chat events and persist each one to Supabase.
// Malformed messages and persistence failures are logged and skipped so a
// single bad event never stalls the partition.
async function run() {
  await consumer.connect()
  await consumer.subscribe({ topic: KAFKA_TOPIC_INBOUND, fromBeginning: false })
  log.info({ KAFKA_TOPIC_INBOUND }, 'worker started')

  await consumer.run({
    eachMessage: async ({ topic, partition, message }) => {
      try {
        const raw = message.value?.toString('utf8') || '{}'
        const evt = JSON.parse(raw)
        const { data, error } = await persistMessage(evt)
        if (error) {
          log.error({ error, evt }, 'persist failed')
        } else {
          log.info({ id: data.id, conversation_id: data.conversation_id }, 'persist ok')
        }
      } catch (e) {
        log.error({ e }, 'worker error')
      }
    }
  })
}
|
||||
|
||||
// Start the consumer; a startup failure is fatal for the process.
run().catch((e) => { log.error({ e }, 'worker start fail'); process.exit(1) })

// Graceful shutdown: leave the consumer group cleanly on Ctrl-C.
process.on('SIGINT', async () => {
try { await consumer.disconnect() } catch {}
process.exit(0)
})
|
||||
BIN
server/mqtt-simulator.rar
Normal file
BIN
server/mqtt-simulator.rar
Normal file
Binary file not shown.
16
server/mqtt-simulator/create_rpc_function.sql
Normal file
16
server/mqtt-simulator/create_rpc_function.sql
Normal file
@@ -0,0 +1,16 @@
|
||||
-- Run this SQL in the Supabase SQL editor to create the RPC function.
-- Works around varchar watch_id values not being numerically comparable
-- directly through the API.

CREATE OR REPLACE FUNCTION get_sim_devices(min_id int)
RETURNS TABLE (watch_id varchar)
LANGUAGE plpgsql
SECURITY DEFINER -- run with the definer's privileges, bypassing RLS (if needed)
AS $$
BEGIN
RETURN QUERY
SELECT d.watch_id
FROM ak_devices d
WHERE d.watch_id ~ '^[0-9]+$' -- select only purely numeric IDs
AND d.watch_id::int > min_id; -- cast to int for the comparison
END;
$$;
|
||||
193
server/mqtt-simulator/main.py
Normal file
193
server/mqtt-simulator/main.py
Normal file
@@ -0,0 +1,193 @@
|
||||
import os
|
||||
import time
|
||||
import json
|
||||
import random
|
||||
import logging
|
||||
from urllib.parse import urlparse
|
||||
from dotenv import load_dotenv
|
||||
import paho.mqtt.client as mqtt
|
||||
from supabase import create_client, Client
|
||||
|
||||
# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Load environment variables from the sibling directory
# Assuming this script is in server/mqtt-simulator/main.py and .env is in server/mqtt-simulator/.env
current_dir = os.path.dirname(os.path.abspath(__file__))
env_path = os.path.join(current_dir, '.env')

if os.path.exists(env_path):
    logger.info(f"Loading .env from: {env_path}")
    # override=True: values from .env win over already-exported variables
    load_dotenv(env_path, override=True)
else:
    logger.warning(f".env file not found at {env_path}")
|
||||
|
||||
# Get MQTT configuration
# .strip() guards against stray whitespace/newlines pasted into .env values.
mqtt_url_str = os.getenv('MQTT_URL')
if mqtt_url_str:
    mqtt_url_str = mqtt_url_str.strip()

mqtt_username = os.getenv('MQTT_USERNAME')
if mqtt_username:
    mqtt_username = mqtt_username.strip()

mqtt_password = os.getenv('MQTT_PASSWORD')
if mqtt_password:
    mqtt_password = mqtt_password.strip()

if not mqtt_url_str:
    logger.error("MQTT_URL not found in environment variables")
    # Fallback or exit? Let's exit to be safe as we need a broker.
    exit(1)

# Parse MQTT URL
try:
    parsed_url = urlparse(mqtt_url_str)
    mqtt_host = parsed_url.hostname
    # Default to the standard unencrypted MQTT port when the URL omits one.
    mqtt_port = parsed_url.port or 1883
    mqtt_protocol = parsed_url.scheme
except Exception as e:
    logger.error(f"Failed to parse MQTT_URL: {e}")
    exit(1)

logger.info(f"MQTT Configuration: Host={mqtt_host}, Port={mqtt_port}, Protocol={mqtt_protocol}")
|
||||
|
||||
# Mock Configuration
|
||||
def str_to_bool(v):
    """Interpret common truthy strings ("true", "1", "t", "yes") as True."""
    normalized = str(v).strip().lower()
    return normalized in {"true", "1", "t", "yes"}
|
||||
|
||||
# Mock feature flags: watch telemetry defaults on, AP reports default off.
enable_watch_mock = str_to_bool(os.getenv('WATCH_MOCK', 'true'))
enable_ap_mock = str_to_bool(os.getenv('AP_REPORT_MOCK', 'false'))
logger.info(f"Mock Configuration: WATCH_MOCK={enable_watch_mock}, AP_REPORT_MOCK={enable_ap_mock}")
|
||||
|
||||
# Supabase Configuration & Device Fetching
# Populates device_ids from the ak_devices table when watch mocking is on;
# falls back to a single synthetic device if nothing usable was fetched.
device_ids = []
if enable_watch_mock:
    supabase_url = os.getenv('SUPABASE_URL')
    supabase_key = os.getenv('SUPABASE_SERVICE_ROLE_KEY')

    if supabase_url:
        supabase_url = supabase_url.strip()
    if supabase_key:
        supabase_key = supabase_key.strip()

    if supabase_url and supabase_key:
        try:
            logger.info("Fetching devices from Supabase...")
            supabase: Client = create_client(supabase_url, supabase_key)
            # Fetch all devices and filter in Python because watch_id is varchar
            # '10000' < '900' in string comparison, so .gt('watch_id', 900) fails for 5-digit IDs
            response = supabase.table('ak_devices').select('watch_id').execute()
            if response.data:
                device_ids = []
                for d in response.data:
                    wid = d.get('watch_id')
                    # Check if watch_id is valid number and > 900
                    if wid and str(wid).isdigit() and int(wid) > 900:
                        device_ids.append(wid)

                logger.info(f"Fetched {len(device_ids)} devices from Supabase (filtered > 900): {device_ids}")
            else:
                logger.warning("No devices found in Supabase with watch_id > 900")
        except Exception as e:
            logger.error(f"Error fetching from Supabase: {e}")
    else:
        logger.warning("SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY not set in .env")

# Fallback: keep the simulator usable with one synthetic (non-numeric) id.
if not device_ids:
    device_ids = ["watch_sim_001"]
    logger.info(f"Using default device list: {device_ids}")
|
||||
|
||||
# Initialize MQTT Client
# Unique client id per run so a restart does not collide with a lingering session.
client_id = f"python-simulator-{int(time.time())}"

# Handle paho-mqtt v2.0.0+ breaking changes
# (v2 requires an explicit CallbackAPIVersion; v1 has no such attribute)
if hasattr(mqtt, 'CallbackAPIVersion'):
    client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2, client_id=client_id)
else:
    client = mqtt.Client(client_id=client_id)

# Credentials are optional; only set when both are provided.
if mqtt_username and mqtt_password:
    client.username_pw_set(mqtt_username, mqtt_password)
|
||||
|
||||
def on_connect(client, userdata, flags, rc, properties=None):
    """Connection callback compatible with both paho-mqtt v1 and v2.

    Under the v2 callback API ``rc`` is a ReasonCode object (with ``.value``);
    under v1 it is a plain int. Normalise to an int before checking for
    success (0). ``properties`` is only supplied by v2 and defaults to None.
    """
    rc_val = rc.value if hasattr(rc, 'value') else rc
    if rc_val == 0:
        logger.info("Connected to MQTT Broker!")
    else:
        logger.error(f"Failed to connect, return code {rc}")
|
||||
|
||||
client.on_connect = on_connect

try:
    logger.info(f"Connecting to {mqtt_host}:{mqtt_port}...")
    # 60s keepalive; loop_start() runs the network loop in a background thread.
    client.connect(mqtt_host, mqtt_port, 60)
    client.loop_start()
except Exception as e:
    logger.error(f"Connection failed: {e}")
    exit(1)
|
||||
|
||||
# Simulation loop: publish mock telemetry every 5 seconds until Ctrl-C.
try:
    logger.info(f"Simulating devices: {device_ids}")
    while True:
        if enable_watch_mock:
            # Iterate over all device IDs
            for current_device_id in device_ids:
                # Simulate Watch Data on topic watch/watch/data.
                # The fallback id "watch_sim_001" is not numeric; int() on it
                # raises ValueError and kills the loop, so only cast ids that
                # are purely numeric and send the rest as-is.
                wid = str(current_device_id)
                watch_data = {
                    "ap": "WDDGW20000009",
                    "battery": random.randint(700, 800),
                    "ch": 22,
                    "cmd": 4101,
                    "heartrate": random.randint(60, 100),
                    "id": int(wid) if wid.isdigit() else current_device_id,
                    "md": 109,
                    "recvtime": int(time.time() * 1000),
                    "rssi": random.randint(-90, -50),
                    "spo2": random.randint(95, 100),
                    "steps": random.randint(0, 100),
                    "time": int(time.time()),
                    "ver": 258
                }
                topic_watch = "watch/watch/data"
                client.publish(topic_watch, json.dumps(watch_data))

            logger.info(f"Published to {topic_watch} for {len(device_ids)} devices")

        if enable_ap_mock:
            # Simulate AP Report on topic watch/ap/report.
            ap_report = {
                "apMac": "AA:BB:CC:DD:EE:FF",
                "timestamp": int(time.time() * 1000),
                "status": "online",
                "clients": [
                    {"mac": "11:22:33:44:55:66", "rssi": random.randint(-80, -40)}
                ],
                "uptime": int(time.time())
            }
            topic_ap = "watch/ap/report"
            client.publish(topic_ap, json.dumps(ap_report))
            logger.info(f"Published to {topic_ap}")

        time.sleep(5)

except KeyboardInterrupt:
    # Clean shutdown: stop the background network thread, then disconnect.
    logger.info("Stopping simulator...")
    client.loop_stop()
    client.disconnect()
|
||||
2
server/mqtt-simulator/requirements.txt
Normal file
2
server/mqtt-simulator/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
paho-mqtt
|
||||
python-dotenv
|
||||
Reference in New Issue
Block a user