From 4d8871f970dfdbd5647496e441103e39393185e3 Mon Sep 17 00:00:00 2001 From: Tuan Date: Tue, 25 Mar 2025 21:18:53 +0700 Subject: [PATCH 1/6] Prepare for function calling --- index.ts | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/index.ts b/index.ts index cf83db5..def5985 100644 --- a/index.ts +++ b/index.ts @@ -148,21 +148,43 @@ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ng return "⏰ Scheduled process completed"; }; -app.post('/assistant', async (c) => { - if (c.req.header('X-Telegram-Bot-Api-Secret-Token') !== c.env.TELEGRAM_BOT_SECRET_TOKEN) { +const assistantQuestion = (c) => { + +} + +const assistantOcr = (c) => { + +} + +const assistantAdhoc = (c) => { + +} + +const verifyAssistantRequest = async (c) => { + const secretToken = c.req.header('X-Telegram-Bot-Api-Secret-Token'); + if (!secretToken || secretToken !== c.env.TELEGRAM_BOT_SECRET_TOKEN) { console.error("Authentication failed. You are not welcome here"); return c.text("Unauthorized", 401); } - + const { message } = await c.req.json(); const bot = new Telegraf(c.env.TELEGRAM_BOT_TOKEN); if (message.from.id != c.env.TELEGRAM_CHAT_ID) { - console.warn("⚠️ Received new assistant request from unknown chat:", await c.req.json()); + console.warn("⚠️ Received new assistant request from unknown chat:", message); await sendTelegramMessage(bot, message.chat.id, "Bạn là người dùng không xác định, bạn không phải anh Ảgú"); return c.text("Unauthorized user"); } + return message; +} + +app.post('/assistant', async (c) => { + const message = await verifyAssistantRequest(c); + if (message instanceof Response) { + return message; // Stop execution if an error response is returned + } + console.info("🔫 Received new assistant request:", message.text); const openai = createOpenAIClient(c.env); From 8b7a28f386c5375ae9f4cf50b11a69e46a1fa797 Mon Sep 17 00:00:00 2001 From: Tuan Date: Tue, 25 Mar 2025 21:20:13 +0700 Subject: [PATCH 2/6] Delete unused checks --- .github/workflows/checks.yml | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 .github/workflows/checks.yml diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml deleted file mode 100644 index b7f6e4c..0000000 --- a/.github/workflows/checks.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: "Check PR Description for Required Keywords" - -on: - pull_request: - types: - - opened - - edited - - synchronize - branches: - - main - -jobs: - check-description: - runs-on: ubuntu-latest - - steps: - - name: "Check PR description for required keywords" - env: - PR_BODY: ${{ github.event.pull_request.body }} - run: | - echo "Checking PR description for required keywords..." - if [[ ! "$PR_BODY" =~ (JIRA|Hotfix|Improvement|Added) ]]; then - echo "ERROR: Pull request description must contain one of the following words: 'JIRA', 'Hotfix', 'Improvement', 'Added'." - exit 1 - fi - echo "PR description contains a required keyword." From f50288532823952e39fe543a02a3b0e70b4299c0 Mon Sep 17 00:00:00 2001 From: Tuan Date: Tue, 25 Mar 2025 21:35:48 +0700 Subject: [PATCH 3/6] Reduce number of invoke of telegraf --- index.ts | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/index.ts b/index.ts index def5985..95daf9b 100644 --- a/index.ts +++ b/index.ts @@ -64,8 +64,10 @@ const createOpenAIClient = (env: Environment) => new OpenAI({ * @param {object} [options={}] - Additional options for the message (e.g. reply_to_message_id). 
* @returns {Promise} */ -const sendTelegramMessage = async (bot: Telegraf, chatId: string, message: string, options = {}) => - bot.telegram.sendMessage(chatId, normalize(message), { parse_mode: "MarkdownV2", ...options }); +const sendTelegramMessage = async (env: Environment, message: string, options = {}) => { + const bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); + bot.telegram.sendMessage(env.TELEGRAM_CHAT_ID, normalize(message), { parse_mode: "MarkdownV2", ...options }); +} /** * Waits for an AI provider thread to complete. @@ -141,8 +143,7 @@ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ng const msgContent = threadMessages[0]?.content[0]?.text?.value; const msg = `🥳 Báo cáo ${reportType} tới rồi đêi\n\n${msgContent}\n------------------`; - const bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); - await sendTelegramMessage(bot, env.TELEGRAM_CHAT_ID, msg); + await sendTelegramMessage(env, msg); console.info(`⏰ ${reportType.charAt(0).toUpperCase() + reportType.slice(1)} message sent successfully`); return "⏰ Scheduled process completed"; @@ -168,11 +169,10 @@ const verifyAssistantRequest = async (c) => { } const { message } = await c.req.json(); - const bot = new Telegraf(c.env.TELEGRAM_BOT_TOKEN); if (message.from.id != c.env.TELEGRAM_CHAT_ID) { console.warn("⚠️ Received new assistant request from unknown chat:", message); - await sendTelegramMessage(bot, message.chat.id, "Bạn là người dùng không xác định, bạn không phải anh Ảgú"); + await sendTelegramMessage(c.env, "Bạn là người dùng không xác định, bạn không phải anh Ảgú"); return c.text("Unauthorized user"); } @@ -200,7 +200,7 @@ app.post('/assistant', async (c) => { console.info("🔫 Message processed successfully:", threadMessages); const msg = threadMessages[0]?.content[0]?.text?.value; - await sendTelegramMessage(bot, c.env.TELEGRAM_CHAT_ID, msg, { reply_to_message_id: message.message_id }); + await sendTelegramMessage(c.env, msg, { reply_to_message_id: message.message_id }); console.info("🔫 Telegram response sent successfully"); return c.text("Request completed"); @@ -418,8 +418,7 @@ export default { * @returns {Promise} */ async sendTelegramNotification(details: any, env: Environment) { - const bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); const message = formatTransactionDetails(details); - await sendTelegramMessage(bot, env.TELEGRAM_CHAT_ID, message); + await sendTelegramMessage(env, message); }, }; From 70900391f861f509e8a3b08e4d8c65966174a559 Mon Sep 17 00:00:00 2001 From: Tuan Date: Tue, 25 Mar 2025 21:51:36 +0700 Subject: [PATCH 4/6] Small improvement for openai client too --- index.ts | 86 ++++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 68 insertions(+), 18 deletions(-) diff --git a/index.ts b/index.ts index 95daf9b..102dcf9 100644 --- a/index.ts +++ b/index.ts @@ -44,14 +44,19 @@ const formatTransactionDetails = (details: any) => ? 
`Transaction error: ${details.error}` : `💳 *Có giao dịch thẻ mới nè*\n\n${details.message}\n\n*Từ:* ${details.bank_name || "N/A"}\n*Ngày:* ${details.datetime || "N/A"}\n------------------`; -const createOpenAIClient = (env: Environment) => new OpenAI({ - project: env.OPENAI_PROJECT_ID, - apiKey: env.OPENAI_API_KEY, - // Your AI gateway, example: - // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai - baseURL: env.AI_API_GATEWAY || "https://api.openai.com/v1", -}); +let openai: OpenAI | null = null; + +const initOpenAIClient = (env: Environment) => { + if (!openai ){ + openai = new OpenAI({ + project: env.OPENAI_PROJECT_ID, + apiKey: env.OPENAI_API_KEY, + baseURL: env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); + } + return openai; +} /** * Sends a Telegram message with the provided message and options. @@ -64,10 +69,12 @@ const createOpenAIClient = (env: Environment) => new OpenAI({ * @param {object} [options={}] - Additional options for the message (e.g. reply_to_message_id). * @returns {Promise} */ +let bot: Telegraf | null = null; + const sendTelegramMessage = async (env: Environment, message: string, options = {}) => { - const bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); - bot.telegram.sendMessage(env.TELEGRAM_CHAT_ID, normalize(message), { parse_mode: "MarkdownV2", ...options }); -} + if (!bot) bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); + await bot.telegram.sendMessage(env.TELEGRAM_CHAT_ID, normalize(message), { parse_mode: "MarkdownV2", ...options }); +}; /** * Waits for an AI provider thread to complete. @@ -126,10 +133,10 @@ const formatDateForReport = (reportType: 'ngày' | 'tuần' | 'tháng') => { * @returns {Promise} A promise that resolves to a message indicating that the scheduled process has completed. */ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ngày' | 'tuần' | 'tháng') => { - const openai = createOpenAIClient(env); const prompt = env.OPENAI_ASSISTANT_SCHEDULED_PROMPT.replace("%DATETIME%", formatDateForReport(reportType)); console.info(`⏰ Processing report for prompt ${prompt}`) + const openai = initOpenAIClient(env); const run = await openai.beta.threads.createAndRun({ assistant_id: env.OPENAI_ASSISTANT_ID, thread: { messages: [{ role: "user", content: prompt }] }, @@ -150,15 +157,15 @@ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ng }; const assistantQuestion = (c) => { - + } const assistantOcr = (c) => { - + } const assistantAdhoc = (c) => { - + } const verifyAssistantRequest = async (c) => { @@ -169,7 +176,7 @@ const verifyAssistantRequest = async (c) => { } const { message } = await c.req.json(); - + if (message.from.id != c.env.TELEGRAM_CHAT_ID) { console.warn("⚠️ Received new assistant request from unknown chat:", message); await sendTelegramMessage(c.env, "Bạn là người dùng không xác định, bạn không phải anh Ảgú"); @@ -179,7 +186,7 @@ const verifyAssistantRequest = async (c) => { return message; } -app.post('/assistant', async (c) => { +app.post('/assistant', async (c) => { const message = await verifyAssistantRequest(c); if (message instanceof Response) { return message; // Stop execution if an error response is returned @@ -187,7 +194,50 @@ app.post('/assistant', async (c) => { console.info("🔫 Received new assistant request:", message.text); - const openai = createOpenAIClient(c.env); + const available_functions = [{ + "type": "function", + "function": { + "name": "assistantQuestion", + "description": "Get information of transactions when asked.", + "parameters": { + 
"type": "object", + "properties": { + "question": { + "type": "string", + "description": "Question in user's request" + } + }, + "required": [ + "question" + ], + "additionalProperties": false + }, + "strict": true + } + }, + { + "type": "function", + "function": { + "name": "assistantOcr", + "description": "Process image sent by user and extract information.", + "parameters": { + "type": "object", + "properties": { + "image": { + "type": "object", + "description": "Image sent by user" + } + }, + "required": [ + "image" + ], + "additionalProperties": false + }, + "strict": true + } + }] + + const openai = initOpenAIClient(c.env); const run = await openai.beta.threads.createAndRun({ assistant_id: c.env.OPENAI_ASSISTANT_ID, thread: { messages: [{ role: "user", content: message.text }] }, @@ -308,7 +358,7 @@ export default { * @returns {false | { result: string, datetime: string, message: string, amount: number, currency: string, bank_name: string, bank_icon: string }} */ async processEmail(emailData: string, env: Environment) { - const openai = createOpenAIClient(env); + const openai = initOpenAIClient(env); const completion = await openai.chat.completions.create({ messages: [ { role: "system", content: env.OPENAI_PROCESS_EMAIL_SYSTEM_PROMPT }, From 8a02c962b5cc7447897e9a5de3cae2547bf4361b Mon Sep 17 00:00:00 2001 From: Tuan Date: Wed, 26 Mar 2025 01:05:00 +0700 Subject: [PATCH 5/6] Pretty ugly implement how we can process, but it's work --- bun.lockb | Bin 54922 -> 54185 bytes index.ts | 546 +++++++++++++++++++++++++-------------------------- package.json | 9 +- 3 files changed, 268 insertions(+), 287 deletions(-) diff --git a/bun.lockb b/bun.lockb index 2d0ef126e85a75fc106d59d99c3c8fce546a2a3c..46ebf4821537669b144475ad565bd6b60159a76e 100755 GIT binary patch delta 7564 zcmeHMd014(mcO^@78*eoLAq%ak$r1ub^>%GE{KSLqR6H;OT>j86cj@vE-a$p!39tR zQG-#UND`H3T%txyKv7&0G*M6tQKBz#j4?iZr*8KZ=Y5$sGx=uzd&T$TcdAa+xmBl@ zTlaKnv)X|V)hficA1>W$`BC2}wKE6p&K;WkDC*gt9#Ae$fus@M_P81sh6 z0^}hriZVj}5qS*qH5H#iHpG0BimQ+fD9TQuPzs7=Dvm+c!;00Y)j`hATDCYIyB!7@ z9HW98Xak?5ut3|I$nBWN28xd`k0VjMMjnY=jsL8Wld|G-7B5Sr&I&0?AM+n0k4COR z#s?^hRpr^Lyj-OZRh9dw$}N%6bb&%oRqzT2OUC&C85N2yWMcRfGBJD*nLKd2Dqo09 z>ZKx+dQr&4A5&C%CzW2L%AZ8z#Lsy+Da76jWPPfDQrty>c;F{wGV)tyNdd3G|Xkd`dZN=8mhdOrt=%6UJH z5GBEO#Ish&BMVR%ivl0H-%`0EqS4Rj6vb_15%O7N;+fm1Ck;E1$y8lHChbnDctu*~ z(xl8pYND-ja6BkPp7E@G)y>i5PX*mg`$4@bjeNuQi zC?8a*;sRtc1@bJ%Y<3g2=&6d#y zbS6}4+QS*X2K^9%M$722P=f!xp?$QBSC3vxfR52VJT=?^az;u-X#pdB`Bs{sOHpx5 zSpiBhM54cg5QqW;D9vEX zK7T8z;}TPr%|>Y|(b7#&B9PJj&@Pbi1-PcA=yMK)YkATM5G0h*+o41# z0N-53cQvOdXT}iL;3Z&g254*g@J^!S1tIoET5h;=-i(hn{KF+I9FFh>yyvL$Ad84^ zXF*Xe*qE6_y;#h-k{mW5X(TRYYqE6tn^8j^G6KS}x2G*&x`UK6XaS|>QjWwDE}Khr zSK(%#Ncw>dFAllUpy1n(wj4Dpm~J8EWLZI}g_Qp*o*A>S8V!{?o}4MxplK=PZ?>kW z$yi811z*{t)(1=J!slU`KR!pB7Ri^ zoG8vKdnmP$asnLSvW=8ii-(mjbeQ?@ZlT0X7RMY9EL$lj-4QO^O8Fg*ICa>Xi${ki zpC_g$oI>Ip^jLrxO6{ba95Gx*+A9W4dk?7Kb6eLsI@96!x}dxfBoW5FM}?9MroY6X z?PLwUP6C>O5GT_D;_N8K+MhF7Ip`gV)QeTsBTUNYBS<+qn@NPpVu?qPe&!-b`WIaA z-&Od3*5kQC(z`t*Bg#XlLAkRL9CFrR`uG8d$ zg-C5d5c@!O2r;}BLDK(*d5|J8f_#aF3IaVcJ#`3Tv7RuTk{C(IgBlSe{ZE+Fy4un6 z-)occ*SkxH6#U=i|GhT<;@b46AK@Q+&F$dg{MV1)+qA^|!XC8;(=r2mql(;?7cR2Z z_ZVZpW2!yvLtol`eyUH{9iEn?ed5Zx(lXtv6Vm;EIQT{W*p01me$ROR`L3l8Q`X#k z;jejVq}T2GoT{!xqQa(_Si5mO;iF>bSUodOoVzKSuYHN<9orLdV%kp?``1T2nX`UX z=a#EG~;>4~!Qx7HmyQ@-d{cA)EV-}$o>d@`)mqnADvz0xyly5qcX z`)`*Ih37q=_U!Q_ZkNgIDS^KpJpQuyJ(<2q>%&RuV+wXV+^(COP(D8+G_$v(X=-;_ zTg}RIO9htJGwl=GCY=krp{eG%ykPQ5>^T~<1NkN(`Pkvk**H?T0$2$dwlQ*Y+_T5k0+CF?d 
zOW5Ne9-kfc`d(VlCqnh0F<-!`*wJ$gGrqhFgVBN+7cU0>_#fZtjWcRkP+lT!AGa&U zYD3T*+0F|OHgu<$Ui)_TwOLEz?{~|VcYh@xTFzBG22@wfGrN&0zyQ)+G3pMThQ=rCn&nPT^?-iVB$ zzc&;;ak97Yx0>4hOULg{mv6+mZCdZ&@Q%?GXjv2rVX=k~_kJ*)20_u0pb@7H&C$Uj z8E>6IQQWLiZ$6wlTvKhSTPm=s7)DEY|6NPW)Mqj9;rHVovilrKE& zxUIKl#3AZ=SEkSQfF6%#EC16wpgB$eaWTPk795C)q_csJjihrR3jZ&M7W}^g_;HbR zF39nJ9-N4agxrPN1{~OC;v3eC=Q&Mr@sFZby*r#3d%0GXs3qi5**v*g zTTtSWu&NcUdFvFr8m&*~>ffG`&{xBImTFS4Hs-O<$#b3Fv6lJzxdww1^_P65<6q)` z{}SUvE~Se`^WQaWQ#(gN<)D*S1aG9O6%O~MfJ(P{lC7k-XdA? zS=rg&mmNtwel43mH_@-DI&;mL)#tVEChq&e>9oXs@ASd!jinlo{yucZK0zV(yA!_o zc;hyeVd4q)olPur#R2U;4f$I~NAr)KXrYQ{ira$6?>Y3|g4LqpM?(eixf+_j5h$sg~4eUiyw5_AkW+k z=~-O*c57wIF%G1sS7@^Vf;7DQYhi^((q>DB2CoP&=ObI?nL0GyO|?if&RItgXNo!A zVwj&fzcoE;$XH)cu5?O-iZy+qf0J9l&>LkU$}$8FnS>db#OBG3!~#Kd8&#}OnfYWh ztl#Wm=!wN12>hZUcX4uurre=pbEteWO8$r$h#*8DVmcxiF%2;j5rV+a3-TdLO+rW! zQxQ`T_;Er$btoBv+_HTT_+i3K^aA9CP*@Qez5=C6L|bnZNqVi5QN z!`x0+B9ont_d}`*K`wQ>5%@7f;dc>pH^+@d?LpKd8W6<2IIq+_(s@kTThOf&K(OVA zA$cVbrxCN{JWNNVwSK$Bhb|yfN7Si^6vQ&bQp6Gj@c@}DGI}yX1%j9$_h&K|;<4q3 zOhgtU2a$~+8;I;q(nE5rkdYK3i1&zhiT8*%(R-$h8;J{vTb1kR&6_6TB}_{i;bN7) zr^HR{lAM;BMv7LXSe~|Ro*u!W=evnL#qKze^rTcd4k6=HMTJ_$kyb|I?$TpH498@gmg7{*MyAz25kz_U!|u{==R*{TW3I@hUyw0QJ9u8 z9+v!jzJe?JG!VN=4C9KAfu}V>cQ(@cb8+zjo43dq-7GB|4&o1Ju9@bQW zU#(EY25-;3ba;9A(%_z9{VB+(71GzB3~RBW-&bW5%|Ezaoiki(2u{}u-PusFiRkyT zhpsv|hxKeQ`RTm7TSra*)M!`}2--V^A~yJZ<7jfl7qtUYE-k@<5x*3`#GOKVC&b|Y zHn86%q;G@XE};n<);8nn)c+9C(mUM3XcT4_pvWG;E!EG5gW7an)lJwpy#_Ea_A2vVhQQK1vu3S-Pstny1@d2RRaNsao%t_ zkOjd;wO^!eTVtTP-<;97iX{})1gqpG!`4J6N!UbQzFtM?)Cp~O=x z@y4*#8+-~Ic&-yUw_P7dh#uCnp=sxUngz#3=x~NLg%Gk^C}Km|=cu0ptp+&$!}^0z zxLZh{f?BM_2Ez9o_+ZJ;8kX0FYdwG~yM^v-;Jm9rckFqafzV++8(O#1w{fM*`Wl8c zmN2$nC}Lyr@nc@|bxaLj4C_N7yk1DlVJX&PBl+O-#C%D4P4RH8V%S|TbY~;~mtH7L z^Y_kyVLkiXfQFvb{y1Ir&9LSy45H<{@yZB$*fn=6Z~G_HaTZ*0`8iY6T`+49(m#Pu zgHZGWYw6=wI-K$F=a-WQUo*SUi_Dd>)-5P$nCYsDeb+A0NO8=hw) z)4KF4P33lm-o0)@HcHP1s2S@x!OrkzpFo6LoDJ)<5pAqM_J{I!AOW3qW;K9c1%}yFT3hkHGb7(I-fXLCrM)hSc)9u^T2aCzk^mdQz_!@5T{bq}$8zvl>teOWm z7phAPNA?OP_Fw>S6!zt+yFoW>Y=Y3dOc62{s?Ig!JxF zP;|p$G_$}_$1r110^uje!`&N(V0%gg+Bcj)dif)!PmeEasqvXnkY_?mrvapQn!vp) zeDFhgW2dI7Yy?Dh3b9;WO`e$o6=%jl#}#9keAR-Ofv(TZaAd}??uz+4d1d=6on!w~ W*}W^4@8msMxpm{!Eg02ZyyG9M=w3qr delta 8053 zcmeHMcUTnJvhQvNVaQ3D84v@YlExv!qJs)1f*4i-6^VlmIVy_Eu;8v)pbVfQQCBd( z!GNxDS0AXDcEz0at_kB_SG|h2PWQC<_U`w+`@XmTz1QE={OVLy*Ey$7=+j*d-)k-2 zrd1+!7;QP~M4XmZh^lxoldQG(?U1{5Vie1K?)cowk- zVzUP85xZk~g$DBxEhx%SHJxKr6Ervs(G(3;s5M5+$xczmVY9sg;vhziv{MB=z!qc1}w70%a`XCM;0iHO8*2qKAxvqs-T zqh~bbI&@Cr9D{>G>brqxMio%1pO7IDIFCqrey9%@rxkbuF`TUFxg3!=NL8jPD7<7? zCK0kn#GlF<|AdGEcHF@3MZ`|3JnbpU3eg6UbkGy!WabPSvP(-M^NN{Tw;z%_h!dT#s-Y+#5Z*RWNHWN4ph z@H!%iWNfxePPf>&6h*TW91}>;5LIOK^JIS%=yDJB{&MltQf5eZ!nzYeSs`OV^3s(y0iSW|Y391c+ zYNb(_0DO#P@X*YFPKRCkC>H9{r@_J8pB8`;|9eA&xxe;zI4tq7&D>A>F>mgq78P=AM1#zQRd4f@tX#pcMJjZO|a{7k)yi90!#FR|9W42pq)z^n6f? 
z{e`W#Du%I|m-^n?-Eqwf0vA0$dK@@N{OKxCO8l8;J)lt{*LKGF84sxvKl&s%Sov%3 z$N4)PT&(=GKOr}goNOU- zVXW*Da-7xXid6|$b zofr%Y1-p#Bg*jLnizO|1DfDKZ_lBfCa@r3X`uNlH;1T|}fkR(^p*imC#7uy*$XghX zrFhmwrr4W40guo^Vv9Qi>j*8FAX{*>mov+4A<15DdKq;?i4GT0Vy6d9qnG|8`rq&)BQTP;vfm~%q-a;Af#e=Y<3%exV+9_E23Oe-s zgqsM5a)$KEpG!pPmfoti`&hA#K57>F2GzgOtcZF}#Bu8B)>} z9d1{_A(Keqv7KCd6mEuJL8{14_ziO8c6J2{o3P}?HpbP_Tlg4D#6>r7)f)&VdII%* zXBizynt<(RFM_x>sWDvfe}yzCr2T0M7D#)+R;j3fxQx*F{Sq0JO-3R;!(nGHf<%s^ z%F3s)Qi>u-CN53`ww<#y@_z%_`l)}zkahXj+u^b!7jaB_nukPslCLSxMWo=yKuflr z^EA@=8sr*}Clz9a*O!P-d}=Wg@wxf>|5Dv;L;l9j-JjbbZt4H5;r;j*Z!pY^Q$lv$}H7 z3+*8 zs3SB`bY$1d-&PN3PFj4{Wz_~~8zvfZ)4zSo>blM&CBq%+mzupy8>aJg=dKGqS5|IK zEPkvoa!X9SzWc(sQuW&tg*AuWUl#B5@eSGVi_MGJn-&{q>fUlx+XrOyfR9sxX(f!F z8V){_4MRM}>>nDvr>cE=Zr!`(S`RWO-z>;*I^=xqeN;)K$Y$DwrS~7-S#sRs>WsQt z+gatA7Wrmt1%Z2lC#I#!^$p(FAIDvYraFx~5tHE^I(<0V5Tc@j=~QTl3WxG3hF~}? z7?0eM)52lYR6{t3rA!b;hr?SerA7xAWRqZPF2T_0j!|WI-B#sZ(_K4h=I`s9tJH6O z5|57EqFnQO{kwaOzti)XkVTejhKKz+e6POV63V;o`pfb~CTBt)1^8)?XvfjVrs%5B zUk>jSQ6M^P&@$=%QnR|4wR%RTYhL9qxt+A(`nMM=Cl*v)F=~2Uy(($y!m|7ZD4Wx) z{q2vj^n{!deslBo?7S4Vs$l^WX~=ez(ltS6xnDZoXw*3)d*q6rYGZ#3{K1-cG{~THOryYpO8Y{QlaNx>*G(ds(u}(JQNxf(HH^To-N-6VZFEU(put zZaJrd57`wt-!yo5=Ro(%p90N>2Dc5~!aC;W(rjpr4TpWvhTswxOy|SwxNv$doX7up z;1C~1rTy3Ri(74T0>UqBP6@cSaB-5)$9kiR*#m0leb3D@E(D9d zY+R4APg+YP#TCVtx#Mmb2Gt6Ra}~c&Ht`j`skgKdzK#?yeO@`Aj1oKVvJYe4(7hDvgkjClbd%xJ1;uc=G^4Q4T4|kR?bFy3Yv}5<*t{FGE z&s!L9_}PwsJUV=L`DN1spElG_ywFcnw_Em&r1Hl|Y2I%iUpjp;Kue%v3NChqVaSJy zX7}RsCew)}8%qzw1dnqMd;Qug`GIVocBk^0-}9i|4db@WcRds-+rH`BxD$(P%k#FJ zI<@fKm53J`FNc?2#KHKygDiv6$>EX9zwCJ6sl*>gEhxR87aLtQ_Wt&)eX1etAB#>O zA6dCH!b$P&NQKYWp#!(x)G3(#!-wO=qwZCGclvmKW%;mQ*SuQrG3n%!FTAgSp;N*@ zOV0vde+25yiMIv1gYmuC5YOcJ$zZ8Ano@_=eNy%^#K46AmU9{(ws=S zSySo5Qkb^P%M@QwC{HAOZK2$eJfLA&m?8iS6SXUUA{bk$0b|NaT%RHqsmVhDd`>w9C_F!At}`xOZ?~p(o@pwVx)W|(gI?Vg_Mbufs~Gv zjFf_uhLnm#T1)yyo*3kDM4ok|2$ihezIsBn<5)b(NBd z$$w+jpE-QE|(3>E++|SC}fLqWr-py4B{CAJUZn#O&9ZqdHS}vw1 zz_oI*75`nm@GR4RLJ>1hKx%X+&CCbm3b7~uA^zA~=eIjrI%>Q0_2|Dl)|;_OzVDm- zCskdVW{9c~Tk)Uv-!B zDQ7s3wnN|vTJbpm*|4?Id%GDkT~;%oSEbmK&lwz}o{jAHh#AtQ=d%jR=0Tf1x~2ZD zOH&W2m0~MC3sKuJKk1pS{e>?5DX6Iw(|6ziTJbpyN3#L$^pbluT~-G09P8(k9&tTB z2#tGLyy?>OX^~%7=e(-%e{-NqGYST85L@wS6!3d$o3w20!Y+L}WNr}Sxn~Jl@ktn6 zQ@PVVdgbXZtIcp2^)bDK4e)LuK7Un45uvnDa4VP|E*#NV{z^7*JKiw}m z();6i)X1b>Btn>*v8fYzNL{!b&W!BspWYj9QCg6l7MB7(VNqj*tvu&x@pa_&-*hZ! z`Cv~}xlwSv$|HnN-h9rakSKYRCCR6E_*@GcKX$S5i5))6qS3fPNwvkGX*fJs_2(?n z7e-{$#1`@SAwF5d-p=mY*dj>*pDyBaH*8;Br9NGWrhjegpR`C**$nc0#%r?%x#N^F zEjuSRB}JO8NS&$3k`B+yQDmjXrcB7mQo@Eh34;fMy>&w^FKcdkb&+$THz?Of-rE&Q z%0%k^_2ad*Jf$A$xea03v>Tk>9@ZZxwKQk?98$H+1y60Ru2MIdo2Sf;M8y3w4*@%5 zuw+Mq7RuGHb~IUntaS@pxY=r;MHuKfErEG$24LLg3e(zpA$0@8wjMB~O%Kj*wg%;y zuNXI1u-R^ltU0TepY8cyMf%Y1jLny#d2Kpy_m3h2_^GWITj#CDKI($AWm;H)6@2MI7i3}mD0;tD%X*+Nx|kru9AsA=kBL*@>u3AnJ6hIBq7dAdu^>xO9^ qDB%Uf6VIbP?&zcj$ZU~XYQ`KbG@J|06sZr~Y3v0TTDs%$y8NG1s?SRR diff --git a/index.ts b/index.ts index 102dcf9..370565d 100644 --- a/index.ts +++ b/index.ts @@ -44,38 +44,6 @@ const formatTransactionDetails = (details: any) => ? 
`Transaction error: ${details.error}` : `💳 *Có giao dịch thẻ mới nè*\n\n${details.message}\n\n*Từ:* ${details.bank_name || "N/A"}\n*Ngày:* ${details.datetime || "N/A"}\n------------------`; - -let openai: OpenAI | null = null; - -const initOpenAIClient = (env: Environment) => { - if (!openai ){ - openai = new OpenAI({ - project: env.OPENAI_PROJECT_ID, - apiKey: env.OPENAI_API_KEY, - baseURL: env.AI_API_GATEWAY || "https://api.openai.com/v1", - }); - } - return openai; -} - -/** - * Sends a Telegram message with the provided message and options. - * - * The message is normalized before sending (special characters are escaped and any "source" markers are removed). - * - * @param {Telegraf} bot - The Telegram bot instance. - * @param {string} chatId - The chat ID to send the message to. - * @param {string} message - The message to send. - * @param {object} [options={}] - Additional options for the message (e.g. reply_to_message_id). - * @returns {Promise} - */ -let bot: Telegraf | null = null; - -const sendTelegramMessage = async (env: Environment, message: string, options = {}) => { - if (!bot) bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); - await bot.telegram.sendMessage(env.TELEGRAM_CHAT_ID, normalize(message), { parse_mode: "MarkdownV2", ...options }); -}; - /** * Waits for an AI provider thread to complete. * @@ -96,6 +64,25 @@ const waitForCompletion = async (openai: OpenAI, threadId: string, runId: string return run; }; +/** + * Sends a Telegram message with the provided message and options. + * + * The message is normalized before sending (special characters are escaped and any "source" markers are removed). + * + * @param {Telegraf} bot - The Telegram bot instance. + * @param {string} chatId - The chat ID to send the message to. + * @param {string} message - The message to send. + * @param {object} [options={}] - Additional options for the message (e.g. reply_to_message_id). + * @returns {Promise} + */ +let bot: Telegraf | null = null; + +const sendTelegramMessage = async (env: Environment, message: string, options = {}) => { + if (!bot) bot = new Telegraf(env.TELEGRAM_BOT_TOKEN); + await bot.telegram.sendMessage(env.TELEGRAM_CHAT_ID, normalize(message), { parse_mode: "MarkdownV2", ...options }); + console.info("🔫 Telegram response sent successfully"); +}; + /** * Formats a date for a report. 
* @@ -136,7 +123,14 @@ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ng const prompt = env.OPENAI_ASSISTANT_SCHEDULED_PROMPT.replace("%DATETIME%", formatDateForReport(reportType)); console.info(`⏰ Processing report for prompt ${prompt}`) - const openai = initOpenAIClient(env); + const openai = new OpenAI({ + project: env.OPENAI_PROJECT_ID, + apiKey: env.OPENAI_API_KEY, + + // Your AI gateway, example: + // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai + baseURL: env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); const run = await openai.beta.threads.createAndRun({ assistant_id: env.OPENAI_ASSISTANT_ID, thread: { messages: [{ role: "user", content: prompt }] }, @@ -156,16 +150,44 @@ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ng return "⏰ Scheduled process completed"; }; -const assistantQuestion = (c) => { +const assistantQuestion = async (c, message) => { + const openai = new OpenAI({ + project: c.env.OPENAI_PROJECT_ID, + apiKey: c.env.OPENAI_API_KEY, + + // Your AI gateway, example: + // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai + baseURL: c.env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); + + const run = await openai.beta.threads.createAndRun({ + assistant_id: c.env.OPENAI_ASSISTANT_ID, + thread: { messages: [{ role: "user", content: message.text }] }, + }); + + console.info("🔫 Thread created successfully:", run.thread_id); + await waitForCompletion(openai, run.thread_id, run.id); + + const { data: threadMessages } = await openai.beta.threads.messages.list(run.thread_id, { run_id: run.id }); + console.info("🔫 Message processed successfully:", threadMessages); + + const msg = threadMessages[0]?.content[0]?.text?.value; + await sendTelegramMessage(c.env, msg, { reply_to_message_id: message.message_id }); + return c.text("Request completed"); } const assistantOcr = (c) => { } -const assistantAdhoc = (c) => { +const assistantManualTransaction = async (transaction, env: Environment) => { + console.info("🔫 Processing manual transaction:", transaction); + const transactionDetails = await processEmail(transaction, env); + if (!transactionDetails) return "Not okay"; + await Promise.all([storeTransaction(transactionDetails, env), notifyServices(transactionDetails, env)]); + return "📬 Email processed successfully"; } const verifyAssistantRequest = async (c) => { @@ -192,283 +214,241 @@ app.post('/assistant', async (c) => { return message; // Stop execution if an error response is returned } - console.info("🔫 Received new assistant request:", message.text); + const openai = new OpenAI({ + project: c.env.OPENAI_PROJECT_ID, + apiKey: c.env.OPENAI_API_KEY, + + // Your AI gateway, example: + // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai + baseURL: c.env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); const available_functions = [{ - "type": "function", - "function": { - "name": "assistantQuestion", - "description": "Get information of transactions when asked.", - "parameters": { - "type": "object", - "properties": { - "question": { - "type": "string", - "description": "Question in user's request" - } - }, - "required": [ - "question" - ], - "additionalProperties": false + type: "function", + name: "assistantQuestion", + description: "Get information of transactions when asked.", + parameters: { + type: "object", + properties: { + question: { + type: "string", + description: "Question in user's request" + } }, - "strict": true - } - }, - { - 
"type": "function", - "function": { - "name": "assistantOcr", - "description": "Process image sent by user and extract information.", - "parameters": { - "type": "object", - "properties": { - "image": { - "type": "object", - "description": "Image sent by user" - } - }, - "required": [ - "image" - ], - "additionalProperties": false + required: [ + "question" + ], + additionalProperties: false + }, + strict: false + }, { + type: "function", + name: "assistantOcr", + description: "Process image sent by user and extract information.", + parameters: { + type: "object", + properties: { + image: { + type: "string", + description: "Image sent by user" + } }, - "strict": true - } - }] - - const openai = initOpenAIClient(c.env); - const run = await openai.beta.threads.createAndRun({ - assistant_id: c.env.OPENAI_ASSISTANT_ID, - thread: { messages: [{ role: "user", content: message.text }] }, + required: [ + "image" + ], + additionalProperties: false + }, + strict: false + }, { + type: "function", + name: "assistantManualTransaction", + description: "Add a transaction manually when user defined.", + parameters: { + type: "object", + properties: { + transaction: { + type: "string", + description: "Content of transaction" + } + }, + required: [ + "transaction" + ], + additionalProperties: false + }, + strict: false + }]; + + console.log("🔫 /assistant/OpenAiResponse request:", message.text); + const response = await openai.responses.create({ + model: "gpt-4o", + input: [ + { + role: "user", + content: message.text + } + ], + tools: available_functions }); - console.info("🔫 Thread created successfully:", run.thread_id); - await waitForCompletion(openai, run.thread_id, run.id); + console.log("🔫 /assistant/OpenAiResponse response:", response); + switch (response.output[0].name) { + case "assistantManualTransaction": + console.log("🔫 Processing case assistantManualTransaction"); + await assistantManualTransaction(JSON.parse(response.output[0].arguments).transaction, c.env); + break; + } - const { data: threadMessages } = await openai.beta.threads.messages.list(run.thread_id, { run_id: run.id }); - console.info("🔫 Message processed successfully:", threadMessages); + return c.text("Success"); +}); - const msg = threadMessages[0]?.content[0]?.text?.value; - await sendTelegramMessage(c.env, msg, { reply_to_message_id: message.message_id }); +const email = async (message, env: Environment) => { + const parser = new PostalMime(); + const body = await new Response(message.raw).arrayBuffer(); + const email = await parser.parse(body); + console.info(`📬 New mail arrived! Sender ${email.from.address} (${email.from.address}), subject: ${email.subject}`); - console.info("🔫 Telegram response sent successfully"); - return c.text("Request completed"); -}); + const emailContent = email.text || email.html; + if (!emailContent) throw new Error("📬 Email content is empty"); -export default { - fetch: app.fetch, + const emailData = `Email date: ${email.date}\nEmail sender: ${email.from.name}\nEmail content:\n${emailContent}`; + const transactionDetails = await processEmail(emailData, env); - /** - * Generate a daily report of the transactions. - * - * This function will be called by Cloudflare at the specified cron time. - * The `env` argument is an object that contains the environment variables. - */ - async dailyReport(env: Environment) { - return createAndProcessScheduledReport(env, 'ngày'); - }, + if (!transactionDetails) return "Not okay"; - /** - * Generate a weekly report of the transactions. 
- * - * This function will be called by Cloudflare at the specified cron time. - * The `env` argument is an object that contains the environment variables. - */ - async weeklyReport(env: Environment) { - return createAndProcessScheduledReport(env, 'tuần'); - }, + await Promise.all([storeTransaction(transactionDetails, env), notifyServices(transactionDetails, env)]); + return "📬 Email processed successfully"; +} - /** - * Generate a monthly report of the transactions. - * - * This function will be called by Cloudflare at the specified cron time. - * The `env` argument is an object that contains the environment variables. - */ - async monthlyReport(env: Environment) { - return createAndProcessScheduledReport(env, 'tháng'); - }, +const storeTransaction = async (details, env: Environment) => { + const fileName = `ArgusChiTieu_transaction_${new Date().toISOString()}.txt`; - /** - * This function is a Cloudflare scheduled worker. - * - * It will be called by Cloudflare at the specified cron time. - * The `event` argument is an object that contains information about the scheduled task, - * and the `env` argument is an object that contains the environment variables. - * - * Depending on the cron time, it will call either the `dailyReport`, `weeklyReport`, or `monthlyReport` function. - */ - async scheduled(event, env: Environment) { - switch (event.cron) { - case "0 15 * * *": - console.info("⏰ Daily scheduler triggered"); - await this.dailyReport(env); - break; - case "58 16 * * 0": - console.info("⏰ Weekly scheduler triggered"); - await this.weeklyReport(env); - break; - case "0 15 1 * *": - console.info("⏰ Monthly scheduler triggered"); - await this.monthlyReport(env); - break; - } - }, + // Seems Cloudflare not allow Workers to write temporary files so + // we use HTTP API instead of client library. - /** - * Process an incoming email. - * - * This function is a Cloudflare Email Worker. - * The `message` argument is an object that contains the email data, - * and the `env` argument is an object that contains the environment variables. - * - * This function will try to parse the email content and extract information from it. - * If the content is not a transaction email, it will return "Not okay". - * If it is a transaction email, it will store the transaction details in the vector store - * and notify the telegram bot. - * The function will return "Email processed successfully" if everything is okay. - */ - async email(message, env: Environment) { - const parser = new PostalMime(); - const body = await new Response(message.raw).arrayBuffer(); - const email = await parser.parse(body); - console.info(`📬 New mail arrived! 
Sender ${email.from.address} (${email.from.address}), subject: ${email.subject}`); + // Convert the details to a text format + const transactionText = JSON.stringify(details, null, 2); + const formData = new FormData(); + formData.append('purpose', 'assistants'); - const emailContent = email.text || email.html; - if (!emailContent) throw new Error("📬 Email content is empty"); + // Create a Blob from the file content + const blob = Buffer.from(transactionText); // Convert content to Buffer + const file = new File([blob], fileName, { type: 'application/json' }); - const emailData = `Email date: ${email.date}\nEmail sender: ${email.from.name}\nEmail content:\n${emailContent}`; - const transactionDetails = await this.processEmail(emailData, env); + // Append the file to FormData + formData.append('file', file); - if (!transactionDetails) return "Not okay"; + // Make the fetch request + const uploadResponse = await fetch(`${env.AI_API_GATEWAY || "https://api.openai.com/v1"}/files`, { + method: 'POST', + headers: { + 'Authorization': `Bearer ${env.OPENAI_API_KEY}`, + // Note: FormData automatically sets the 'Content-Type' boundary, so no need to set it manually + }, + body: formData, + }); - await Promise.all([this.storeTransaction(transactionDetails, env), this.notifyServices(transactionDetails, env)]); - return "📬 Email processed successfully"; - }, + // Check if the response is okay + if (!uploadResponse.ok) { + throw new Error(`Upload transaction file error: ${uploadResponse.statusText}`); + } - /** - * Process an email using AI provider's chat completion API. - * - * Given an email data, it will call AI provider's chat completion API with the email data and the configured system/user prompts. - * The response will be parsed as JSON and returned. - * If the response is not a transaction email, `false` will be returned. - * If the response is a transaction email, the transaction details will be returned as an object. 
- * @param {string} emailData - The email data - * @param {Environment} env - The environment variables - * @returns {false | { result: string, datetime: string, message: string, amount: number, currency: string, bank_name: string, bank_icon: string }} - */ - async processEmail(emailData: string, env: Environment) { - const openai = initOpenAIClient(env); - const completion = await openai.chat.completions.create({ - messages: [ - { role: "system", content: env.OPENAI_PROCESS_EMAIL_SYSTEM_PROMPT }, - { role: "user", content: `${env.OPENAI_PROCESS_EMAIL_USER_PROMPT}\n\n${emailData}` }, - ], - model: env.OPENAI_PROCESS_EMAIL_MODEL, - store: false, - }); - - const contentStr = completion.choices[0]?.message?.content?.replaceAll('`', ''); - if (!contentStr) { - console.error("🤖 Failed to parse transaction details"); - return; - } + console.info(`🤖 Upload ${fileName} successfully`) + + const uploadResult = await uploadResponse.json(); + const fileId = uploadResult.id; + const vectorStoreResponse = await fetch(`${env.AI_API_GATEWAY || "https://api.openai.com/v1"}/vector_stores/${env.OPENAI_ASSISTANT_VECTORSTORE_ID}/files`, { + method: 'POST', + headers: { + 'Authorization': `Bearer ${env.OPENAI_API_KEY}`, + 'Content-Type': 'application/json', + 'OpenAI-Beta': 'assistants=v2', + }, + body: JSON.stringify({ file_id: fileId }), + }); - const content = JSON.parse(contentStr); - if (content.result === "failed") { - console.warn("🤖 Not a transaction email"); - return; - } + // Check if the response for adding to vector store is okay + if (!vectorStoreResponse.ok) { + throw new Error(`Error adding file to vector store: ${vectorStoreResponse.statusText}`); + } - console.info(`🤖 Processed email content: ${JSON.stringify(content)}`); - return content; - }, + console.info(`🤖 Add ${fileName} to Vector store successfully`) +} - /** - * Store a transaction in AI provider's vector store. - * @param {false | { result: string, datetime: string, message: string, amount: number, currency: string, bank_name: string, bank_icon: string }} details - The transaction details - * @param {Environment} env - The environment variables - * @returns {Promise} - * Resolves when the transaction is stored successfully. - * Rejects if any error occurs during the process. - */ - async storeTransaction(details, env: Environment) { - const fileName = `ArgusChiTieu_transaction_${new Date().toISOString()}.txt`; - - // Seems Cloudflare not allow Workers to write temporary files so - // we use HTTP API instead of client library. 
- - // Convert the details to a text format - const transactionText = JSON.stringify(details, null, 2); - const formData = new FormData(); - formData.append('purpose', 'assistants'); - - // Create a Blob from the file content - const blob = Buffer.from(transactionText); // Convert content to Buffer - const file = new File([blob], fileName, { type: 'application/json' }); - - // Append the file to FormData - formData.append('file', file); - - // Make the fetch request - const uploadResponse = await fetch(`${env.AI_API_GATEWAY || "https://api.openai.com/v1"}/files`, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${env.OPENAI_API_KEY}`, - // Note: FormData automatically sets the 'Content-Type' boundary, so no need to set it manually - }, - body: formData, - }); +const notifyServices = async (details: any, env: Environment) => { + const message = formatTransactionDetails(details); + await sendTelegramMessage(env, message); +} - // Check if the response is okay - if (!uploadResponse.ok) { - throw new Error(`Upload transaction file error: ${uploadResponse.statusText}`); - } +const processEmail = async (emailData: string, env: Environment) => { + const openai = new OpenAI({ + project: env.OPENAI_PROJECT_ID, + apiKey: env.OPENAI_API_KEY, - console.info(`🤖 Upload ${fileName} successfully`) + // Your AI gateway, example: + // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai + baseURL: env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); - const uploadResult = await uploadResponse.json(); - const fileId = uploadResult.id; - const vectorStoreResponse = await fetch(`${env.AI_API_GATEWAY || "https://api.openai.com/v1"}/vector_stores/${env.OPENAI_ASSISTANT_VECTORSTORE_ID}/files`, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${env.OPENAI_API_KEY}`, - 'Content-Type': 'application/json', - 'OpenAI-Beta': 'assistants=v2', - }, - body: JSON.stringify({ file_id: fileId }), - }); + console.log(`🤖 Processing email content: ${emailData}`); - // Check if the response for adding to vector store is okay - if (!vectorStoreResponse.ok) { - throw new Error(`Error adding file to vector store: ${vectorStoreResponse.statusText}`); - } + const completion = await openai.chat.completions.create({ + messages: [ + { role: "system", content: env.OPENAI_PROCESS_EMAIL_SYSTEM_PROMPT }, + { role: "user", content: `${env.OPENAI_PROCESS_EMAIL_USER_PROMPT}\n\n${emailData}` }, + ], + model: env.OPENAI_PROCESS_EMAIL_MODEL, + store: false, + }); - console.info(`🤖 Add ${fileName} to Vector store successfully`) - }, + const contentStr = completion.choices[0]?.message?.content?.replaceAll('`', ''); + if (!contentStr) { + console.error("🤖 Failed to parse transaction details"); + return; + } + const content = JSON.parse(contentStr); + if (content.result === "failed") { + console.warn("🤖 Not a transaction email"); + return; + } - /** - * Notify all services of a new transaction. - * - * Currently only notifies Telegram. 
- * - * @param {object} details - The transaction details - * @param {object} env - The environment variables - * @returns {Promise} - */ - async notifyServices(details: any, env: Environment) { - await this.sendTelegramNotification(details, env); - }, + console.info(`🤖 Processed email content: ${JSON.stringify(content)}`); + return content; +} + +const dailyReport = async (env: Environment) => { + return createAndProcessScheduledReport(env, 'ngày'); +} + +const weeklyReport = async (env: Environment) => { + return createAndProcessScheduledReport(env, 'tuần'); +} + +const monthlyReport = async (env: Environment) => { + return createAndProcessScheduledReport(env, 'tháng'); +} - /** - * Sends a Telegram notification with the transaction details. - * - * @param {object} details - The transaction details - * @param {object} env - The environment variables - * @returns {Promise} - */ - async sendTelegramNotification(details: any, env: Environment) { - const message = formatTransactionDetails(details); - await sendTelegramMessage(env, message); +export default { + fetch: app.fetch, + + async scheduled(event, env: Environment) { + switch (event.cron) { + case "0 15 * * *": + console.info("⏰ Daily scheduler triggered"); + await dailyReport(env); + break; + case "58 16 * * 0": + console.info("⏰ Weekly scheduler triggered"); + await weeklyReport(env); + break; + case "0 15 1 * *": + console.info("⏰ Monthly scheduler triggered"); + await monthlyReport(env); + break; + } }, + + async email(message, env: Environment) { + return email(message, env); + } }; diff --git a/package.json b/package.json index 620b1f7..f7185de 100644 --- a/package.json +++ b/package.json @@ -5,14 +5,15 @@ "scripts": { "deploy": "wrangler deploy", "dev": "wrangler dev", - "start": "wrangler dev" + "start": "wrangler dev", + "tail": "wrangler tail" }, "devDependencies": { - "wrangler": "^3.111.0" + "wrangler": "^3.114.2" }, "dependencies": { - "hono": "^4.7.2", - "openai": "^4.85.4", + "hono": "^4.7.5", + "openai": "^4.89.0", "postal-mime": "^2.4.3", "telegraf": "^4.16.3" } From a26945896885b68b45f32511393fc9dd6436dbc7 Mon Sep 17 00:00:00 2001 From: Tuan Date: Wed, 26 Mar 2025 18:38:19 +0700 Subject: [PATCH 6/6] OCR work too --- index.ts | 90 +++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 83 insertions(+), 7 deletions(-) diff --git a/index.ts b/index.ts index 370565d..c41d32e 100644 --- a/index.ts +++ b/index.ts @@ -1,7 +1,9 @@ import { Hono } from 'hono'; +import { logger } from 'hono/logger' import PostalMime from 'postal-mime'; import OpenAI from 'openai'; import { Telegraf } from 'telegraf'; +import { Buffer } from 'node:buffer'; type Environment = { readonly TELEGRAM_CHAT_ID: string; @@ -23,6 +25,7 @@ type Environment = { }; const app = new Hono<{ Bindings: Environment }>(); +app.use(logger()) const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); @@ -94,9 +97,11 @@ const sendTelegramMessage = async (env: Environment, message: string, options = * - For "tuần", the date range is returned in the format "YYYY-MM-DD đến YYYY-MM-DD". * - For "tháng", the date is returned in the format "MM/YYYY". 
*/ -const formatDateForReport = (reportType: 'ngày' | 'tuần' | 'tháng') => { +const formatDate = (reportType?: 'giờ' |'ngày' | 'tuần' | 'tháng') => { const currentDate = new Date(); switch (reportType) { + case 'giờ': + return currentDate.toLocaleTimeString('vi-VN', { timeZone: "Asia/Bangkok" }); case 'ngày': return currentDate.toLocaleDateString('vi-VN', { timeZone: "Asia/Bangkok" }); case 'tuần': @@ -108,6 +113,8 @@ const formatDateForReport = (reportType: 'ngày' | 'tuần' | 'tháng') => { return ` từ ${formattedMonday} đến ${formattedSunday}`; case 'tháng': return `${currentDate.getMonth() + 1}/${currentDate.getFullYear()}`; + default: + return `${formatDate('ngày')} vào lúc ${formatDate('giờ')}`; } }; @@ -120,7 +127,7 @@ const formatDateForReport = (reportType: 'ngày' | 'tuần' | 'tháng') => { * @returns {Promise} A promise that resolves to a message indicating that the scheduled process has completed. */ const createAndProcessScheduledReport = async (env: Environment, reportType: 'ngày' | 'tuần' | 'tháng') => { - const prompt = env.OPENAI_ASSISTANT_SCHEDULED_PROMPT.replace("%DATETIME%", formatDateForReport(reportType)); + const prompt = env.OPENAI_ASSISTANT_SCHEDULED_PROMPT.replace("%DATETIME%", formatDate(reportType)); console.info(`⏰ Processing report for prompt ${prompt}`) const openai = new OpenAI({ @@ -177,13 +184,69 @@ const assistantQuestion = async (c, message) => { return c.text("Request completed"); } -const assistantOcr = (c) => { +// Function to download telegram file from bot from file_id and return as base64 +const downloadTelegramFile = async (fileId: string, env: Environment) => { + const url = `https://api.telegram.org/bot${env.TELEGRAM_BOT_TOKEN}/getFile?file_id=${fileId}`; + const response = await fetch(url); + const data = await response.json(); + const fileUrl = `https://api.telegram.org/file/bot${env.TELEGRAM_BOT_TOKEN}/${data.result.file_path}`; + const fileResponse = await fetch(fileUrl); + const buffer = Buffer.from(await fileResponse.arrayBuffer()); + + // Extract the file extension to determine the image type + const fileExtension = data.result.file_path.split('.').pop(); + const imageType = fileExtension ? fileExtension : 'jpeg'; + + console.log("🔫 File downloaded successfully", `data:image/${imageType};base64,${buffer.toString('base64')}`); + + return `data:image/${imageType};base64,${buffer.toString('base64')}`; +} + + +const imageOcr = async (message, c) => { + let imgB64 = await downloadTelegramFile(message.photo[3].file_id, c.env); + + const openai = new OpenAI({ + project: c.env.OPENAI_PROJECT_ID, + apiKey: c.env.OPENAI_API_KEY, + + // Your AI gateway, example: + // https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai + baseURL: c.env.AI_API_GATEWAY || "https://api.openai.com/v1", + }); + + const response = await openai.responses.create({ + model: "gpt-4o-mini", + input: [ + { + role: "user", + content: [ + { type: "input_text", text: `Print the text inside the image. 
Try to focus on store name, date time (if not found, please use ${formatDate()}), price tag of receipt` }, + { + type: "input_image", + image_url: imgB64, + }, + ], + }, + ], + }); + + return response.output_text; +} + +const assistantOcr = async (message, c) => { + const transaction = await imageOcr(message, c); + const transactionDetails = await processTransaction(transaction, c.env); + + if (!transactionDetails) return "Not okay"; + await Promise.all([storeTransaction(transactionDetails, c.env), notifyServices(transactionDetails, c.env)]); + return "📬 Email processed successfully"; } const assistantManualTransaction = async (transaction, env: Environment) => { console.info("🔫 Processing manual transaction:", transaction); - const transactionDetails = await processEmail(transaction, env); + const transactionDetails = await processTransaction(transaction, env); if (!transactionDetails) return "Not okay"; await Promise.all([storeTransaction(transactionDetails, env), notifyServices(transactionDetails, env)]); @@ -279,6 +342,12 @@ app.post('/assistant', async (c) => { strict: false }]; + if (message.text === undefined) { + console.log("🔫 Processing case assistantOcr"); + await assistantOcr(message, c); + return c.text("Success"); + } + console.log("🔫 /assistant/OpenAiResponse request:", message.text); const response = await openai.responses.create({ model: "gpt-4o", @@ -290,13 +359,20 @@ app.post('/assistant', async (c) => { ], tools: available_functions }); - console.log("🔫 /assistant/OpenAiResponse response:", response); + switch (response.output[0].name) { case "assistantManualTransaction": console.log("🔫 Processing case assistantManualTransaction"); await assistantManualTransaction(JSON.parse(response.output[0].arguments).transaction, c.env); break; + case "assistantQuestion": + console.log("🔫 Processing case assistantQuestion"); + await assistantQuestion(c, message); + break; + default: + console.log("🔫 Processing default case"); + return c.text("Request completed"); } return c.text("Success"); @@ -312,7 +388,7 @@ const email = async (message, env: Environment) => { if (!emailContent) throw new Error("📬 Email content is empty"); const emailData = `Email date: ${email.date}\nEmail sender: ${email.from.name}\nEmail content:\n${emailContent}`; - const transactionDetails = await processEmail(emailData, env); + const transactionDetails = await processTransaction(emailData, env); if (!transactionDetails) return "Not okay"; @@ -380,7 +456,7 @@ const notifyServices = async (details: any, env: Environment) => { await sendTelegramMessage(env, message); } -const processEmail = async (emailData: string, env: Environment) => { +const processTransaction = async (emailData: string, env: Environment) => { const openai = new OpenAI({ project: env.OPENAI_PROJECT_ID, apiKey: env.OPENAI_API_KEY,
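
Appendix (not part of the patch series): a hedged local smoke test for the POST /assistant webhook route built up across these patches, written as a sketch under assumptions. It assumes `wrangler dev` is serving the worker on its default port 8787, and that the secret token and chat id below are placeholder values matching whatever is configured locally (e.g. in .dev.vars). The update shape only mirrors what verifyAssistantRequest and the function-calling dispatch read: the X-Telegram-Bot-Api-Secret-Token header, message.from.id, and message.text.

// smoke-test.ts: hypothetical helper, not present in the repository
const SECRET = "local-dev-secret";   // assumed to match TELEGRAM_BOT_SECRET_TOKEN locally
const CHAT_ID = 123456789;           // assumed to match TELEGRAM_CHAT_ID locally

const update = {
  message: {
    message_id: 1,
    from: { id: CHAT_ID },           // verifyAssistantRequest compares this to TELEGRAM_CHAT_ID
    chat: { id: CHAT_ID },
    text: "How much did I spend this week?", // plain text routes to assistantQuestion
  },
};

const res = await fetch("http://localhost:8787/assistant", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "X-Telegram-Bot-Api-Secret-Token": SECRET, // checked before any OpenAI call is made
  },
  body: JSON.stringify(update),
});

console.log(res.status, await res.text()); // expect 200 if the secret, chat id, and OpenAI credentials are all configured

Run it with something like `bun run smoke-test.ts` while `wrangler dev` is up. Because verifyAssistantRequest returns a Response on failure, a wrong secret should surface here as a 401 Unauthorized without ever reaching OpenAI, and an unknown from.id should trigger the "unknown user" Telegram reply instead of the function-calling flow.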