I am trying to call Workers AI using LangChain in a Worker. I have the code set up, but I get "Cloudflare LLM call failed with status code 404". Maybe my environment variables are not set up right? But I cannot tell, because console.log output apparently does not show when running wrangler dev. Am I missing something obvious here?
import type { D1Database } from '@cloudflare/workers-types';
import { Hono } from 'hono';
import { ConversationChain } from 'langchain/chains';
import { CloudflareWorkersAI } from 'langchain/llms/cloudflare_workersai';
import { BufferMemory } from 'langchain/memory';
import { CloudflareD1MessageHistory } from 'langchain/stores/message/cloudflare_d1';

type Bindings = {
  DB: D1Database;
  CLOUDFLARE_API_TOKEN: string;
  CLOUDFLARE_ACCOUNT_ID: string;
};

const app = new Hono<{ Bindings: Bindings }>();

app.get('/', async (c) => {
  try {
    const input = c.req.query('input');
    console.log(input);
    if (!input) {
      return c.text('Please provide input query parameter', 400);
    }
    console.error(c.env);

    const memory = new BufferMemory({
      chatHistory: new CloudflareD1MessageHistory({
        tableName: 'stored_message',
        sessionId: 'example',
        database: c.env.DB,
      }),
    });

    const model = new CloudflareWorkersAI({
      // model: '@cf/mistral/mistral-7b-instruct-v0.1',
      streaming: false,
      cloudflareAccountId: c.env.CLOUDFLARE_ACCOUNT_ID,
      cloudflareApiToken: c.env.CLOUDFLARE_API_TOKEN,
    });

    const chain = new ConversationChain({ llm: model, memory });
    const res = await chain.call({ input });
    return c.text(res.response);
  } catch (e) {
    console.error(e);
    return c.text(e.message, 500);
  }
});

export default app;
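For what it's worth, one way to check whether the bindings are populated at all, without depending on console.log output, is a tiny debug route that reports them. A minimal sketch under that assumption (the /debug-env path is made up for illustration, and it deliberately avoids echoing the secret values):

import { Hono } from 'hono';

// Same Bindings shape as in the worker above (DB omitted for brevity).
type Bindings = {
  CLOUDFLARE_API_TOKEN: string;
  CLOUDFLARE_ACCOUNT_ID: string;
};

const app = new Hono<{ Bindings: Bindings }>();

// Hypothetical /debug-env route: reports whether each binding resolves,
// without printing the actual token or account ID.
app.get('/debug-env', (c) =>
  c.json({
    hasAccountId: Boolean(c.env.CLOUDFLARE_ACCOUNT_ID),
    hasApiToken: Boolean(c.env.CLOUDFLARE_API_TOKEN),
  })
);

export default app;

Hitting that route during wrangler dev would show immediately whether the values from .dev.vars are reaching the Worker.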
I have a .dev.vars file with my env variables in it, but again I cannot confirm they actually work, since the logs apparently do not show. Looks like, according to the error below, the account ID is undefined, so I guess the variables are not being read correctly.
✘ [ERROR] Error: Cloudflare LLM call failed with status code 404
at file:///Users/jaybell/WebstormProjects/chat-mtg/apps/urza/.wrangler/tmp/dev-Wu0sfs/worker.js:21281:23
at async RetryOperation._fn (file:///Users/jaybell/WebstormProjects/chat-mtg/apps/urza/.wrangler/tmp/dev-Wu0sfs/worker.js:2531:19) {
response: Response {
cf: undefined,
webSocket: null,
url: 'https://api.cloudflare.com/client/v4/accounts/undefined/ai/run/@cf/meta/llama-2-7b-chat-int8',
redirected: false,
ok: false,
headers: Headers(7) {
'cf-ray' => '82bda66f7c48137e-YVR',
'connection' => 'keep-alive',
'content-length' => '212',
'content-type' => 'application/json',
'date' => 'Sat, 25 Nov 2023 23:25:18 GMT',
'server' => 'cloudflare',
'set-cookie' => '__cfruid=c0301c9031f66fa1b36041c6283bd35b6b21326c-1700954718; path=/; domain=.api.cloudflare.com; HttpOnly; Secure; SameSite=None',
[immutable]: true
},
statusText: 'Not found',
status: 404,
bodyUsed: false,
body: ReadableStream {
locked: false,
[state]: 'readable',
[supportsBYOB]: true,
[length]: 212n
}
},
attemptNumber: 1,
retriesLeft: 6
[mf:inf] GET / 500 Internal Server Error (88ms)
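The accounts/undefined segment in that URL confirms cloudflareAccountId was undefined when LangChain built the request, i.e. the values from .dev.vars are not being loaded (Wrangler reads .dev.vars as dotenv-style KEY=value lines sitting next to wrangler.toml). Once the values do resolve, a credentials check that bypasses LangChain entirely could look roughly like this sketch; it calls the same REST endpoint that appears in the trace, and the { prompt } request body shape is an assumption here:

// Rough sketch: call the Workers AI REST endpoint directly to verify that the
// account ID and API token work, independent of LangChain.
async function checkWorkersAI(accountId: string, apiToken: string): Promise<void> {
  const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/run/@cf/meta/llama-2-7b-chat-int8`;

  const resp = await fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${apiToken}`,
      'Content-Type': 'application/json',
    },
    // Assumed body shape for a simple text-generation request.
    body: JSON.stringify({ prompt: 'Say hello' }),
  });

  // A 404 with "accounts/undefined" in the URL means the account ID still is
  // not resolving; a 200 means the credentials themselves are fine.
  console.log(resp.status, await resp.text());
}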