I still get an error after changing the `constants.ts` file:
  {
    name: 'OpenAILike',
    staticModels: [
      { name: 'hf:meta-llama/Meta-Llama-3.1-405B-Instruct', label: 'hf:meta-llama/Meta-Llama-3.1-405B-Instruct', provider: 'glhf', maxTokenAllowed: 4096},
      { name: 'hf:Qwen/Qwen2.5-Coder-32B-Instruct', label: 'hf:Qwen/Qwen2.5-Coder-32B-Instruct', provider: 'glhf', maxTokenAllowed: 4096},
      { name: 'hf:Qwen/Qwen2.5-72B-Instruct', label: 'hf:Qwen/Qwen2.5-72B-Instruct', provider: 'glhf', maxTokenAllowed: 4096},
      {
        name: 'hf:meta-llama/Llama-3.3-70B-Instruct',
        label: 'hf:meta-llama/Llama-3.3-70B-Instruct',
        provider: 'glhf',
        maxTokenAllowed: 4096
      },
    ],
    getApiKeyLink: "https://glhf.chat/api/openai/v1",
  },
This is the error output:
bolt.diy/node_modules/.pnpm/@remix-run+dev@2.15.0_@remix-run+react@2.15.0_react-dom@18.3.1_react@18.3.1__react@18.3.1_typ_3djlhh3t6jbfog2cydlrvgreoy/node_modules/@remix-run/dev/dist/vite/cloudflare-proxy-plugin.js:70:25 {
  cause: undefined,
  url: 'https://glhf.chat/api/openai/v1/chat/completions',
  requestBodyValues: {
    model: 'numctx-marco-o1:latest',
    logit_bias: undefined,
    logprobs: undefined,
    top_logprobs: undefined,
    user: undefined,
    parallel_tool_calls: undefined,
    max_tokens: 8000,
    temperature: 0,
    top_p: undefined,
    frequency_penalty: undefined,
    presence_penalty: undefined,
    stop: undefined,
    seed: undefined,
    max_completion_tokens: undefined,
    store: undefined,
    metadata: undefined,
    response_format: undefined,
    messages: [ [Object], [Object] ],
    tools: undefined,
    tool_choice: undefined,
    stream: true,
    stream_options: undefined
  },
  statusCode: 400,
  responseHeaders: {
    'access-control-allow-headers': 'Content-Type, Authorization',
    'access-control-allow-methods': 'GET,POST,PUT,DELETE,OPTIONS',
    'access-control-allow-origin': '*',
    'alt-svc': 'h3=":443"; ma=86400',
    'cf-cache-status': 'DYNAMIC',
    'cf-ray': '8f316e7abe14a336-SEA',
    connection: 'keep-alive',
    'content-type': 'application/json',
    date: 'Mon, 16 Dec 2024 20:30:30 GMT',
    nel: '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}',
    'report-to': '{"endpoints":[{"url":"https:\\/\\/a.nel.cloudflare.com\\/report\\/v4?s=512xwjRdHDEfmhT1jiqaY1JtZjcyFfhNWxmIM6j4GvENJZDKYN5pM%2BxWNBv9TKjJy43CBXZfzYW6pLjJxAK%2F47cPhb3wF9LI%2FImbOxyVoxETGL2VZG1T1yPOPPo%3D"}],"group":"cf-nel","max_age":604800}',
    server: 'cloudflare',
    'server-timing': 'cfL4;desc="?proto=TCP&rtt=5970&min_rtt=4696&rtt_var=2299&sent=7&recv=18&lost=0&retrans=0&sent_bytes=2831&recv_bytes=16797&delivery_rate=555796&cwnd=246&unsent_bytes=0&cid=692b40617c12cb31&ts=910&x=0"',
    'strict-transport-security': 'max-age=31536000; includeSubDomains',
    'transfer-encoding': 'chunked',
    vary: 'RSC, Next-Router-State-Tree, Next-Router-Prefetch',
    'x-clerk-auth-message': 'Invalid JWT form. A JWT consists of three parts separated by dots. (reason=token-invalid, token-carrier=header)',
    'x-clerk-auth-reason': 'token-invalid',
    'x-clerk-auth-status': 'signed-out',
    'x-middleware-rewrite': '/api/openai/v1/chat/completions'
  },
  responseBody: '{"error":"Failed validation: model must begin with valid prefix (eg. hf:)"}',
  isRetryable: false,
  data: undefined,
  [Symbol(vercel.ai.error)]: true,
  [Symbol(vercel.ai.error.AI_APICallError)]: true
}
Additionally, it doesn't show any models in the drop-down after selecting "OpenAILike".
Any ideas?