Vendor opencode source for docker build

This commit is contained in:
southseact-3d
2026-02-07 20:54:46 +00:00
parent b30ff1cfa4
commit efda260214
3195 changed files with 387717 additions and 1 deletions

View File

@@ -0,0 +1,867 @@
/* Text-selection highlight; foreground flips for the dark scheme. */
::selection {
background: var(--color-background-interactive);
color: var(--color-text-strong);
@media (prefers-color-scheme: dark) {
background: var(--color-background-interactive);
color: var(--color-text-inverted);
}
}
/* Zen lander design tokens — light scheme (HSL palette). */
[data-page="zen"] {
--color-background: hsl(0, 20%, 99%);
--color-background-weak: hsl(0, 8%, 97%);
--color-background-strong: hsl(0, 5%, 12%);
--color-background-strong-hover: hsl(0, 5%, 18%);
--color-background-interactive: hsl(62, 84%, 88%);
--color-background-interactive-weaker: hsl(64, 74%, 95%);
--color-text: hsl(0, 1%, 39%);
--color-text-weak: hsl(0, 1%, 74%);
--color-text-weaker: hsl(30, 2%, 81%);
--color-text-strong: hsl(0, 5%, 12%);
--color-text-inverted: hsl(0, 20%, 99%);
--color-border: hsl(30, 2%, 81%);
--color-border-weak: hsl(0, 1%, 85%);
--color-icon: hsl(0, 1%, 55%);
}
/* Zen lander design tokens — dark-scheme overrides for the same names. */
[data-page="zen"] {
@media (prefers-color-scheme: dark) {
--color-background: hsl(0, 9%, 7%);
--color-background-weak: hsl(0, 6%, 10%);
--color-background-strong: hsl(0, 15%, 94%);
--color-background-strong-hover: hsl(0, 15%, 97%);
--color-background-interactive: hsl(62, 100%, 90%);
--color-background-interactive-weaker: hsl(60, 20%, 8%);
--color-text: hsl(0, 4%, 71%);
--color-text-weak: hsl(0, 2%, 49%);
--color-text-weaker: hsl(0, 3%, 28%);
--color-text-strong: hsl(0, 15%, 94%);
--color-text-inverted: hsl(0, 9%, 7%);
--color-border: hsl(0, 3%, 28%);
--color-border-weak: hsl(0, 4%, 23%);
--color-icon: hsl(10, 3%, 43%);
}
}
/* Page background follows the active scheme's token. */
body {
background: var(--color-background);
}
/* Feature-query on -webkit-named-image(), a WebKit-proprietary function —
   effectively scopes this border to Safari. NOTE(review): confirm this
   Safari-only workaround is still required. */
@supports (background: -webkit-named-image(i)) {
[data-page="opencode"] {
border-top: 1px solid var(--color-border-weak);
}
}
/* Zen lander root: mono typeface, responsive gutters, stacked sections.
   (Rule stays open here; it is closed at the end of the stylesheet.) */
[data-page="zen"] {
background: var(--color-background);
--padding: 5rem;
--vertical-padding: 4rem;
@media (max-width: 60rem) {
--padding: 1.5rem;
--vertical-padding: 3rem;
}
display: flex;
gap: var(--vertical-padding);
flex-direction: column;
font-family: var(--font-mono);
color: var(--color-text);
padding-bottom: 5rem;
a {
color: var(--color-text-strong);
text-decoration: underline;
text-underline-offset: var(--space-1);
text-decoration-thickness: 1px;
}
p {
line-height: 200%;
@media (max-width: 60rem) {
line-height: 180%;
}
}
@media (max-width: 60rem) {
font-size: 15px;
}
/* Autofill hack: defeat the UA's yellow autofill background by stretching
   its background-color transition over ~58 days. */
input:-webkit-autofill,
input:-webkit-autofill:hover,
input:-webkit-autofill:focus,
input:-webkit-autofill:active {
transition: background-color 5000000s ease-in-out 0s;
}
input:-webkit-autofill {
-webkit-text-fill-color: var(--color-text-strong) !important;
}
/* Fix: there is no -moz-text-fill-color property, so the former
   input:-moz-autofill rule was a no-op. Use the standard :autofill
   pseudo-class; Firefox also honors -webkit-text-fill-color. */
input:autofill {
-webkit-text-fill-color: var(--color-text-strong) !important;
}
/* Centered page frame; side borders drop on narrow viewports. */
[data-component="container"] {
max-width: 67.5rem;
margin: 0 auto;
border: 1px solid var(--color-border-weak);
border-top: none;
@media (max-width: 65rem) {
border: none;
}
}
/* Intentionally empty hook for the main content wrapper. */
[data-component="content"] {
}
/* Sticky 80px site header: logo, desktop nav, and mobile hamburger menu. */
[data-component="top"] {
padding: 24px var(--padding);
height: 80px;
position: sticky;
top: 0;
display: flex;
justify-content: space-between;
align-items: center;
background: var(--color-background);
border-bottom: 1px solid var(--color-border-weak);
z-index: 10;
img {
height: 34px;
width: auto;
}
/* Horizontal nav; hidden at/below 40rem in favor of the mobile menu. */
[data-component="nav-desktop"] {
ul {
display: flex;
justify-content: space-between;
align-items: center;
gap: 48px;
@media (max-width: 55rem) {
gap: 32px;
}
@media (max-width: 48rem) {
gap: 24px;
}
li {
display: inline-block;
a {
text-decoration: none;
span {
color: var(--color-text-weak);
}
}
a:hover {
text-decoration: underline;
text-underline-offset: var(--space-1);
text-decoration-thickness: 1px;
}
[data-slot="cta-button"] {
background: var(--color-background-strong);
color: var(--color-text-inverted);
padding: 8px 16px;
border-radius: 4px;
font-weight: 500;
text-decoration: none;
white-space: nowrap;
@media (max-width: 55rem) {
display: none;
}
}
[data-slot="cta-button"]:hover {
background: var(--color-background-strong-hover);
text-decoration: none;
}
}
}
@media (max-width: 40rem) {
display: none;
}
}
[data-component="nav-mobile"] {
button > svg {
color: var(--color-icon);
}
}
[data-component="nav-mobile-toggle"] {
border: none;
background: none;
outline: none;
height: 40px;
width: 40px;
cursor: pointer;
}
[data-component="nav-mobile-toggle"]:hover {
background: var(--color-background-weak);
}
/* NOTE(review): second nav-mobile rule — could be merged with the one
   above. Shown only at/below 40rem; the fixed menu overlays the page
   below the 80px header. */
[data-component="nav-mobile"] {
display: none;
@media (max-width: 40rem) {
display: block;
[data-component="nav-mobile-icon"] {
cursor: pointer;
height: 40px;
width: 40px;
display: flex;
align-items: center;
justify-content: center;
}
[data-component="nav-mobile-menu-list"] {
position: fixed;
background: var(--color-background);
top: 80px;
left: 0;
right: 0;
height: 100vh;
ul {
list-style: none;
padding: 20px 0;
li {
a {
text-decoration: none;
padding: 20px;
display: block;
span {
color: var(--color-text-weak);
}
}
a:hover {
background: var(--color-background-weak);
}
}
}
}
}
}
/* Color-scheme-dependent logo swap. */
[data-slot="logo dark"] {
display: none;
}
@media (prefers-color-scheme: dark) {
[data-slot="logo light"] {
display: none;
}
[data-slot="logo dark"] {
display: block;
}
}
}
/* Hero section: per-scheme ornate logo, headline, model-logo strip, CTA. */
[data-component="hero"] {
display: flex;
flex-direction: column;
padding: calc(var(--vertical-padding) * 2) var(--padding);
[data-slot="zen logo dark"] {
display: none;
}
@media (max-width: 30rem) {
padding: var(--vertical-padding) var(--padding);
}
@media (prefers-color-scheme: dark) {
[data-slot="zen logo light"] {
display: none;
}
[data-slot="zen logo dark"] {
display: block;
}
}
}
[data-slot="hero-copy"] {
img {
margin-bottom: 24px;
}
h1 {
font-size: 28px;
color: var(--color-text-strong);
font-weight: 700;
margin-bottom: 16px;
display: block;
@media (max-width: 60rem) {
font-size: 22px;
}
}
p {
color: var(--color-text);
margin-bottom: 24px;
max-width: 82%;
@media (max-width: 50rem) {
max-width: 100%;
}
}
/* CTA anchor styled as a button. */
a {
background: var(--color-background-strong);
padding: 8px 12px 8px 20px;
color: var(--color-text-inverted);
border: none;
border-radius: 4px;
font-weight: 500;
cursor: pointer;
margin-bottom: 56px;
display: flex;
width: fit-content;
gap: 12px;
text-decoration: none;
}
a:hover {
background: var(--color-background-strong-hover);
}
}
/* Provider/model logo strip; same color in both schemes (token flips). */
[data-slot="model-logos"] {
display: flex;
gap: 24px;
margin-bottom: 56px;
svg {
color: var(--color-background-strong);
}
@media (prefers-color-scheme: dark) {
svg {
color: var(--color-background-strong);
}
}
}
[data-slot="pricing-copy"] {
strong {
color: var(--color-text-strong);
font-weight: 500;
}
p:first-child {
margin-bottom: 24px;
color: var(--color-text);
display: flex;
gap: 8px;
@media (max-width: 40rem) {
flex-direction: column;
gap: 4px;
}
}
}
/* Full-bleed comparison video. */
[data-component="comparison"] {
border-top: 1px solid var(--color-border-weak);
video {
width: 100%;
height: auto;
max-width: none;
max-height: none;
display: block;
}
}
/* Shared heading block used by the content sections below. */
[data-slot="section-title"] {
margin-bottom: 24px;
h3 {
font-size: 16px;
font-weight: 700;
color: var(--color-text-strong);
margin-bottom: 12px;
}
p {
margin-bottom: 12px;
color: var(--color-text);
}
}
/* "Problem" section: bulleted list with icon markers in spans. */
[data-component="problem"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
color: var(--color-text);
p {
margin-bottom: 24px;
}
ul {
padding: 0;
li {
list-style: none;
margin-bottom: 16px;
display: flex;
gap: 12px;
span {
color: var(--color-icon);
}
}
li:last-child {
margin-bottom: 0;
}
}
}
/* "How it works" section: numbered-steps list. */
[data-component="how"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
color: var(--color-text);
ul {
padding: 0;
li {
list-style: none;
margin-bottom: 16px;
display: flex;
gap: 12px;
span {
color: var(--color-icon);
}
strong {
font-weight: 500;
color: var(--color-text-strong);
}
}
li:last-child {
margin-bottom: 0;
}
}
}
/* Privacy section. Fix: the title block previously carried an exact
   duplicate of the div rule and an empty p rule; both removed. */
[data-component="privacy"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
color: var(--color-text);
[data-slot="privacy-title"] {
h3 {
font-size: 16px;
font-weight: 700;
color: var(--color-text);
margin-bottom: 12px;
}
/* Icon + copy row. */
div {
display: flex;
gap: 12px;
}
span {
color: var(--color-icon);
line-height: 200%;
@media (max-width: 60rem) {
line-height: 180%;
}
}
}
}
/* Email signup: dock artwork plus an input with an absolutely-positioned
   submit button. */
[data-component="email"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
color: var(--color-text);
[data-slot="dock"] {
border-radius: 14px;
border: 0.5px solid rgba(176, 176, 176, 0.6);
background: #f2f1f0;
margin-bottom: 32px;
overflow: hidden;
height: 64px;
width: 185px;
/* Layered soft shadow approximating a smooth falloff. */
box-shadow:
0 6px 80px 0 rgba(0, 0, 0, 0.05),
0 2.507px 33.422px 0 rgba(0, 0, 0, 0.04),
0 1.34px 17.869px 0 rgba(0, 0, 0, 0.03),
0 0.751px 10.017px 0 rgba(0, 0, 0, 0.03),
0 0.399px 5.32px 0 rgba(0, 0, 0, 0.02),
0 0.166px 2.214px 0 rgba(0, 0, 0, 0.01);
img {
width: 100%;
height: auto;
}
@media (prefers-color-scheme: dark) {
background: #312d2d;
}
}
[data-slot="form"] {
position: relative;
input {
background: var(--color-background-weak);
border-radius: 6px;
border: 1px solid var(--color-border-weak);
padding: 20px;
width: 100%;
/* Use color, not -moz-text-fill-color, for normal text */
color: var(--color-text-strong);
@media (max-width: 30rem) {
padding-bottom: 80px;
}
&:not(:focus) {
color: var(--color-text-strong);
}
&::placeholder {
color: var(--color-text-weak);
opacity: 1;
}
/* Optional legacy */
&::-moz-placeholder {
color: var(--color-text-weak);
opacity: 1;
}
}
input:focus {
background: var(--color-background-interactive-weaker);
outline: none;
border: none;
color: var(--color-text-strong);
border: 1px solid var(--color-background-strong); /* Tailwind blue-600 as example */
/* Tailwind-style ring */
box-shadow: 0 0 0 3px var(--color-background-interactive);
/* mimics "ring-2 ring-blue-600/50" */
}
/* Submit: vertically centered on desktop; drops to the bottom on phones. */
button {
position: absolute;
height: 40px;
right: 12px;
background: var(--color-background-strong);
padding: 4px 20px;
color: var(--color-text-inverted);
border-radius: 4px;
font-weight: 500;
border: none;
outline: none;
cursor: pointer;
top: 50%;
margin-top: -20px;
@media (max-width: 30rem) {
left: 20px;
right: 20px;
bottom: 20px;
top: auto;
}
}
}
}
/* FAQ accordion. An ancestor carries [data-closed]/[data-expanded]; the
   plus/minus icons toggle visibility off that state. */
[data-component="faq"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
ul {
padding: 0;
li {
list-style: none;
margin-bottom: 24px;
line-height: 200%;
@media (max-width: 60rem) {
line-height: 180%;
}
}
}
[data-slot="faq-question"] {
display: flex;
gap: 16px;
margin-bottom: 8px;
color: var(--color-text-strong);
font-weight: 500;
cursor: pointer;
background: none;
border: none;
padding: 0;
/* Shown while closed, hidden when expanded. */
[data-slot="faq-icon-plus"] {
flex-shrink: 0;
color: var(--color-text-weak);
margin-top: 2px;
[data-closed] & {
display: block;
}
[data-expanded] & {
display: none;
}
}
/* Inverse of the plus icon. */
[data-slot="faq-icon-minus"] {
flex-shrink: 0;
color: var(--color-text-weak);
margin-top: 2px;
[data-closed] & {
display: none;
}
[data-expanded] & {
display: block;
}
}
[data-slot="faq-question-text"] {
flex-grow: 1;
text-align: left;
}
}
[data-slot="faq-answer"] {
margin-left: 40px;
margin-bottom: 32px;
}
}
/* Testimonial cards; each card is wrapped in an external link. */
[data-component="testimonials"] {
border-top: 1px solid var(--color-border-weak);
padding: var(--vertical-padding) var(--padding);
display: flex;
flex-direction: column;
gap: 20px;
a {
text-decoration: none;
}
[data-slot="testimonial"] {
background: var(--color-background-weak);
border-radius: 6px;
border: 1px solid var(--color-border-weak);
padding: 20px;
display: flex;
flex-direction: column;
gap: 12px;
@media (max-width: 30rem) {
flex-direction: column-reverse;
gap: 24px;
}
/* Avatar, name, and title row. */
[data-slot="name"] {
display: flex;
gap: 16px;
strong {
font-weight: 500;
flex: 0 0 auto;
}
span {
color: var(--color-text);
}
@media (max-width: 30rem) {
flex-direction: column;
gap: 8px;
}
span {
display: inline-block;
}
img {
height: 24px;
width: 24px;
border-radius: 24px;
}
}
[data-slot="quote"] {
margin-left: 40px;
@media (max-width: 30rem) {
margin-left: 0;
}
span {
color: var(--color-text);
text-decoration: none;
}
}
}
[data-slot="button"] {
all: unset;
cursor: pointer;
display: flex;
align-items: center;
color: var(--color-text);
gap: var(--space-2-5);
font-size: 1rem;
@media (max-width: 24rem) {
font-size: 0.875rem;
}
strong {
color: var(--color-text-strong);
font-weight: 500;
}
@media (max-width: 40rem) {
justify-content: flex-start;
}
@media (max-width: 30rem) {
justify-content: center;
}
}
}
/* Copy-to-clipboard indicator: an ancestor's [data-copied] attribute swaps
   the copy icon for the check icon. Hidden entirely on narrow screens. */
[data-component="copy-status"] {
@media (max-width: 38rem) {
display: none;
}
[data-slot="copy"] {
display: block;
width: var(--space-4);
height: var(--space-4);
color: var(--color-text-weaker);
[data-copied] & {
display: none;
}
}
[data-slot="check"] {
display: none;
width: var(--space-4);
height: var(--space-4);
color: var(--color-text-strong);
[data-copied] & {
display: block;
}
}
}
/* Footer: equal-width link cells with dividers; wraps on small screens. */
[data-component="footer"] {
border-top: 1px solid var(--color-border-weak);
display: flex;
flex-direction: row;
@media (max-width: 65rem) {
border-bottom: 1px solid var(--color-border-weak);
}
[data-slot="cell"] {
flex: 1;
text-align: center;
a {
text-decoration: none;
padding: 2rem 0;
width: 100%;
display: block;
span {
color: var(--color-text-weak);
@media (max-width: 40rem) {
display: none;
}
}
}
a:hover {
background: var(--color-background-weak);
text-decoration: underline;
text-underline-offset: var(--space-1);
text-decoration-thickness: 1px;
}
}
[data-slot="cell"] + [data-slot="cell"] {
border-left: 1px solid var(--color-border-weak);
@media (max-width: 40rem) {
border-left: none;
}
}
/* Mobile: third column on its own row */
@media (max-width: 25rem) {
flex-wrap: wrap;
[data-slot="cell"] {
flex: 1 0 100%;
border-left: none;
border-top: 1px solid var(--color-border-weak);
}
[data-slot="cell"]:nth-child(1) {
border-top: none;
}
}
}
/* Legal links row under the footer. */
[data-component="legal"] {
color: var(--color-text-weak);
text-align: center;
display: flex;
gap: 32px;
justify-content: center;
a {
color: var(--color-text-weak);
text-decoration: none;
}
a:hover {
color: var(--color-text);
text-decoration: underline;
}
}
/* Closes the [data-page="zen"] root rule opened near the top of the file. */
}

View File

@@ -0,0 +1,337 @@
import "./index.css"
import { createAsync, query, redirect } from "@solidjs/router"
import { Title, Meta } from "@solidjs/meta"
//import { HttpHeader } from "@solidjs/start"
import zenLogoLight from "../../asset/zen-ornate-light.svg"
import zenLogoDark from "../../asset/zen-ornate-dark.svg"
import compareVideo from "../../asset/lander/opencode-comparison-min.mp4"
import compareVideoPoster from "../../asset/lander/opencode-comparison-poster.png"
import avatarDax from "../../asset/lander/avatar-dax.png"
import avatarJay from "../../asset/lander/avatar-jay.png"
import avatarFrank from "../../asset/lander/avatar-frank.png"
import avatarAdam from "../../asset/lander/avatar-adam.png"
import avatarDavid from "../../asset/lander/avatar-david.png"
import { EmailSignup } from "~/component/email-signup"
import { Faq } from "~/component/faq"
import { Legal } from "~/component/legal"
import { Footer } from "~/component/footer"
import { Header } from "~/component/header"
import { getLastSeenWorkspaceID } from "../workspace/common"
import { IconGemini, IconMiniMax, IconZai } from "~/component/icon"
import { useI18n } from "~/context/i18n"
import { useLanguage } from "~/context/language"
import { LocaleLinks } from "~/component/locale-links"
// Server query: when the visitor has a last-seen workspace, bounce them
// straight to it; otherwise resolve normally so the lander renders.
// A failing lookup is treated the same as "no workspace".
const checkLoggedIn = query(async () => {
"use server"
let workspaceID: Awaited<ReturnType<typeof getLastSeenWorkspaceID>> | undefined
try {
workspaceID = await getLastSeenWorkspaceID()
} catch {
workspaceID = undefined
}
if (workspaceID) throw redirect(`/workspace/${workspaceID}`)
}, "checkLoggedIn.get")
/**
 * Zen marketing lander (route component).
 *
 * Kicks off checkLoggedIn (which redirects signed-in visitors to their last
 * workspace), then renders hero, comparison video, problem/how/privacy
 * sections, testimonials, FAQ, email signup, footer, and legal links.
 * All copy comes from the i18n context; doc links are locale-routed.
 */
export default function Home() {
const loggedin = createAsync(() => checkLoggedIn())
const i18n = useI18n()
const language = useLanguage()
return (
<main data-page="zen">
{/*<HttpHeader name="Cache-Control" value="public, max-age=1, s-maxage=3600, stale-while-revalidate=86400" />*/}
<Title>{i18n.t("zen.title")}</Title>
<LocaleLinks path="/zen" />
<Meta property="og:image" content="/social-share-zen.png" />
<Meta name="twitter:image" content="/social-share-zen.png" />
{/* NOTE(review): exposes login state in the page head — confirm who consumes this meta tag. */}
<Meta name="opencode:auth" content={loggedin() ? "true" : "false"} />
<div data-component="container">
<Header zen hideGetStarted />
<div data-component="content">
{/* Hero: per-scheme ornate logo, headline, provider marks, CTA, pricing copy. */}
<section data-component="hero">
<div data-slot="hero-copy">
<img data-slot="zen logo light" src={zenLogoLight} alt="" />
<img data-slot="zen logo dark" src={zenLogoDark} alt="" />
<h1>{i18n.t("zen.hero.title")}</h1>
<p>{i18n.t("zen.hero.body")}</p>
{/* Provider marks: some inline SVGs, some icon components (IconGemini, IconMiniMax, IconZai). */}
<div data-slot="model-logos">
<div>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<mask
id="mask0_79_128586"
style="mask-type:luminance"
maskUnits="userSpaceOnUse"
x="1"
y="1"
width="22"
height="22"
>
<path d="M23 1.5H1V22.2952H23V1.5Z" fill="white" />
</mask>
<g mask="url(#mask0_79_128586)">
<path
d="M9.43799 9.06943V7.09387C9.43799 6.92749 9.50347 6.80267 9.65601 6.71959L13.8206 4.43211C14.3875 4.1202 15.0635 3.9747 15.7611 3.9747C18.3775 3.9747 20.0347 5.9087 20.0347 7.96734C20.0347 8.11288 20.0347 8.27926 20.0128 8.44564L15.6956 6.03335C15.434 5.88785 15.1723 5.88785 14.9107 6.03335L9.43799 9.06943ZM19.1624 16.7637V12.0431C19.1624 11.7519 19.0315 11.544 18.7699 11.3984L13.2972 8.36234L15.0851 7.3849C15.2377 7.30182 15.3686 7.30182 15.5212 7.3849L19.6858 9.67238C20.8851 10.3379 21.6917 11.7519 21.6917 13.1243C21.6917 14.7047 20.7106 16.1604 19.1624 16.7636V16.7637ZM8.15158 12.6047L6.36369 11.6066C6.21114 11.5235 6.14566 11.3986 6.14566 11.2323V6.65735C6.14566 4.43233 7.93355 2.7478 10.3538 2.7478C11.2697 2.7478 12.1199 3.039 12.8396 3.55886L8.54424 5.92959C8.28268 6.07508 8.15181 6.28303 8.15181 6.57427V12.6049L8.15158 12.6047ZM12 14.7258L9.43799 13.3533V10.4421L12 9.06965L14.5618 10.4421V13.3533L12 14.7258ZM13.6461 21.0476C12.7303 21.0476 11.8801 20.7564 11.1604 20.2366L15.4557 17.8658C15.7173 17.7203 15.8482 17.5124 15.8482 17.2211V11.1905L17.658 12.1886C17.8105 12.2717 17.876 12.3965 17.876 12.563V17.1379C17.876 19.3629 16.0662 21.0474 13.6461 21.0474V21.0476ZM8.47863 16.4103L4.314 14.1229C3.11471 13.4573 2.30808 12.0433 2.30808 10.6709C2.30808 9.06965 3.31106 7.6348 4.85903 7.03168V11.773C4.85903 12.0642 4.98995 12.2721 5.25151 12.4177L10.7025 15.4328L8.91464 16.4103C8.76209 16.4934 8.63117 16.4934 8.47863 16.4103ZM8.23892 19.8207C5.77508 19.8207 3.96533 18.0531 3.96533 15.8696C3.96533 15.7032 3.98719 15.5368 4.00886 15.3704L8.30418 17.7412C8.56574 17.8867 8.82752 17.8867 9.08909 17.7412L14.5618 14.726V16.7015C14.5618 16.8679 14.4964 16.9927 14.3438 17.0758L10.1792 19.3633C9.61225 19.6752 8.93631 19.8207 8.23869 19.8207H8.23892ZM13.6461 22.2952C16.2844 22.2952 18.4865 20.5069 18.9882 18.1362C21.4301 17.5331 23 15.3495 23 13.1245C23 11.6688 22.346 10.2548 21.1685 9.23581C21.2775 8.79908 21.343 8.36234 21.343 7.92582C21.343 4.95215 18.8137 2.72691 15.892 
2.72691C15.3034 2.72691 14.7365 2.80999 14.1695 2.99726C13.1882 2.08223 11.8364 1.5 10.3538 1.5C7.71557 1.5 5.51352 3.28829 5.01185 5.65902C2.56987 6.26214 1 8.44564 1 10.6707C1 12.1264 1.65404 13.5404 2.83147 14.5594C2.72246 14.9961 2.65702 15.4328 2.65702 15.8694C2.65702 18.8431 5.1863 21.0683 8.108 21.0683C8.69661 21.0683 9.26354 20.9852 9.83046 20.7979C10.8115 21.713 12.1634 22.2952 13.6461 22.2952Z"
fill="currentColor"
/>
</g>
</svg>
</div>
<div>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.7891 3.93164L20.2223 20.0677H23.7502L17.317 3.93164H13.7891Z" fill="currentColor" />
<path
d="M6.32538 13.6824L8.52662 8.01177L10.7279 13.6824H6.32538ZM6.68225 3.93164L0.25 20.0677H3.84652L5.16202 16.6791H11.8914L13.2067 20.0677H16.8033L10.371 3.93164H6.68225Z"
fill="currentColor"
/>
</svg>
</div>
<div>
<IconGemini width="24" height="24" />
</div>
<div>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M9.16861 16.0529L17.2018 9.85156C17.5957 9.54755 18.1586 9.66612 18.3463 10.1384C19.3339 12.6288 18.8926 15.6217 16.9276 17.6766C14.9626 19.7314 12.2285 20.1821 9.72948 19.1557L6.9995 20.4775C10.9151 23.2763 15.6699 22.5841 18.6411 19.4749C20.9979 17.0103 21.7278 13.6508 21.0453 10.6214L21.0515 10.6278C20.0617 6.17736 21.2948 4.39847 23.8207 0.760904C23.8804 0.674655 23.9402 0.588405 24 0.5L20.6762 3.97585V3.96506L9.16658 16.0551"
fill="currentColor"
/>
<path
d="M7.37742 16.7017C4.67579 14.0395 5.14158 9.91963 7.44676 7.54383C9.15135 5.78544 11.9442 5.06779 14.3821 6.12281L17.0005 4.87559C16.5288 4.52392 15.9242 4.14566 15.2305 3.87986C12.0948 2.54882 8.34069 3.21127 5.79171 5.8386C3.33985 8.36779 2.56881 12.2567 3.89286 15.5751C4.88192 18.0552 3.26056 19.8094 1.62731 21.5801C1.04853 22.2078 0.467774 22.8355 0 23.5L7.3754 16.7037"
fill="currentColor"
/>
</svg>
</div>
<div>
<IconMiniMax width="24" height="24" />
</div>
<div>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M12.6241 11.346L20.3848 3.44816C20.5309 3.29931 20.4487 3 20.2601 3H16.0842C16.0388 3 15.9949 3.01897 15.9594 3.05541L7.59764 11.5629C7.46721 11.6944 7.27446 11.5771 7.27446 11.3666V3.25183C7.27446 3.11242 7.18515 3 7.07594 3H4.19843C4.08932 3 4 3.11242 4 3.25183V20.7482C4 20.8876 4.08932 21 4.19843 21H7.07594C7.18515 21 7.27446 20.8876 7.27446 20.7482V17.1834C7.27446 17.1073 7.30136 17.0344 7.34815 16.987L9.94075 14.3486C10.0031 14.2853 10.0895 14.2757 10.159 14.3232L17.0934 19.5573C18.2289 20.3412 19.4975 20.8226 20.786 20.9652C20.9008 20.9778 21 20.8606 21 20.7133V17.3559C21 17.2276 20.9249 17.1232 20.8243 17.1073C20.0659 16.9853 19.326 16.6845 18.6569 16.222L12.6538 11.764C12.5291 11.6785 12.5135 11.4584 12.6241 11.346Z"
fill="currentColor"
/>
</svg>
</div>
<div>
<IconZai width="24" height="24" />
</div>
<div>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M12.6043 1.34016C12.9973 2.03016 13.3883 2.72215 13.7783 3.41514C13.7941 3.44286 13.8169 3.46589 13.8445 3.48187C13.8721 3.49786 13.9034 3.50624 13.9353 3.50614H19.4873C19.6612 3.50614 19.8092 3.61614 19.9332 3.83314L21.3872 6.40311C21.5772 6.74011 21.6272 6.88111 21.4112 7.24011C21.1512 7.6701 20.8982 8.1041 20.6512 8.54009L20.2842 9.19809C20.1782 9.39409 20.0612 9.47809 20.2442 9.71008L22.8962 14.347C23.0682 14.648 23.0072 14.841 22.8532 15.117C22.4162 15.902 21.9712 16.681 21.5182 17.457C21.3592 17.729 21.1662 17.832 20.8382 17.827C20.0612 17.811 19.2863 17.817 18.5113 17.843C18.4946 17.8439 18.4785 17.8489 18.4644 17.8576C18.4502 17.8664 18.4385 17.8785 18.4303 17.893C17.5361 19.4773 16.6344 21.0573 15.7253 22.633C15.5563 22.926 15.3453 22.996 15.0003 22.997C14.0033 23 12.9983 23.001 11.9833 22.999C11.8889 22.9987 11.7961 22.9735 11.7145 22.9259C11.6328 22.8783 11.5652 22.8101 11.5184 22.728L10.1834 20.405C10.1756 20.3898 10.1637 20.3771 10.149 20.3684C10.1343 20.3598 10.1174 20.3554 10.1004 20.356H4.98244C4.69744 20.386 4.42944 20.355 4.17745 20.264L2.57447 17.494C2.52706 17.412 2.50193 17.319 2.50158 17.2243C2.50123 17.1296 2.52567 17.0364 2.57247 16.954L3.77945 14.834C3.79665 14.8041 3.80569 14.7701 3.80569 14.7355C3.80569 14.701 3.79665 14.667 3.77945 14.637C3.15073 13.5485 2.52573 12.4579 1.90448 11.3651L1.11449 9.97008C0.954488 9.66008 0.941489 9.47409 1.20949 9.00509C1.67448 8.1921 2.13647 7.38011 2.59647 6.56911C2.72847 6.33512 2.90046 6.23512 3.18046 6.23412C4.04344 6.23048 4.90644 6.23015 5.76943 6.23312C5.79123 6.23295 5.81259 6.22704 5.83138 6.21597C5.85016 6.20491 5.8657 6.1891 5.87643 6.17012L8.68239 1.27516C8.72491 1.2007 8.78631 1.13875 8.86039 1.09556C8.93448 1.05238 9.01863 1.02948 9.10439 1.02917C9.62838 1.02817 10.1574 1.02917 10.6874 1.02317L11.7044 1.00017C12.0453 0.997165 12.4283 1.03217 12.6043 1.34016ZM9.17238 1.74316C9.16185 1.74315 9.15149 1.74592 9.14236 1.75119C9.13323 1.75645 9.12565 1.76403 9.12038 1.77316L6.25442 
6.78811C6.24066 6.81174 6.22097 6.83137 6.19729 6.84505C6.17361 6.85873 6.14677 6.86599 6.11942 6.86611H3.25346C3.19746 6.86611 3.18346 6.89111 3.21246 6.94011L9.02239 17.096C9.04739 17.138 9.03539 17.158 8.98839 17.159L6.19342 17.174C6.15256 17.1727 6.11214 17.1828 6.07678 17.2033C6.04141 17.2238 6.01253 17.2539 5.99342 17.29L4.67344 19.6C4.62944 19.678 4.65244 19.718 4.74144 19.718L10.4574 19.726C10.5034 19.726 10.5374 19.746 10.5614 19.787L11.9643 22.241C12.0103 22.322 12.0563 22.323 12.1033 22.241L17.1093 13.481L17.8923 12.0991C17.897 12.0905 17.904 12.0834 17.9125 12.0785C17.9209 12.0735 17.9305 12.0709 17.9403 12.0709C17.9501 12.0709 17.9597 12.0735 17.9681 12.0785C17.9765 12.0834 17.9835 12.0905 17.9883 12.0991L19.4123 14.629C19.4229 14.648 19.4385 14.6637 19.4573 14.6746C19.4761 14.6855 19.4975 14.6912 19.5193 14.691L22.2822 14.671C22.2893 14.6711 22.2963 14.6693 22.3024 14.6658C22.3086 14.6623 22.3137 14.6572 22.3172 14.651C22.3206 14.6449 22.3224 14.638 22.3224 14.631C22.3224 14.624 22.3206 14.6172 22.3172 14.611L19.4173 9.52508C19.4068 9.50809 19.4013 9.48853 19.4013 9.46859C19.4013 9.44864 19.4068 9.42908 19.4173 9.41209L19.7102 8.90509L20.8302 6.92811C20.8542 6.88711 20.8422 6.86611 20.7952 6.86611H9.20038C9.14138 6.86611 9.12738 6.84011 9.15738 6.78911L10.5914 4.28413C10.6021 4.26706 10.6078 4.24731 10.6078 4.22714C10.6078 4.20697 10.6021 4.18721 10.5914 4.17014L9.22538 1.77416C9.22016 1.7647 9.21248 1.75682 9.20315 1.75137C9.19382 1.74591 9.18319 1.74307 9.17238 1.74316ZM15.4623 9.76308C15.5083 9.76308 15.5203 9.78308 15.4963 9.82308L14.6643 11.2881L12.0513 15.873C12.0464 15.8819 12.0392 15.8894 12.0304 15.8945C12.0216 15.8996 12.0115 15.9022 12.0013 15.902C11.9912 15.902 11.9813 15.8993 11.9725 15.8942C11.9637 15.8891 11.9564 15.8818 11.9513 15.873L8.49839 9.84108C8.47839 9.80708 8.48839 9.78908 8.52639 9.78708L8.74239 9.77508L15.4643 9.76308H15.4623Z"
fill="currentColor"
/>
</svg>
</div>
</div>
{/* Primary CTA into the auth flow. */}
<a href="/auth">
<span>{i18n.t("zen.cta.start")}</span>
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M6.5 12L17 12M13 16.5L17.5 12L13 7.5"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="square"
/>
</svg>
</a>
</div>
<div data-slot="pricing-copy">
<p>
<strong>{i18n.t("zen.pricing.title")}</strong> <span>{i18n.t("zen.pricing.fee")}</span>
</p>
<p>{i18n.t("zen.pricing.body")}</p>
</div>
</section>
{/* Autoplaying muted comparison video with poster fallback. */}
<section data-component="comparison">
<video src={compareVideo} autoplay playsinline loop muted preload="auto" poster={compareVideoPoster}>
{i18n.t("common.videoUnsupported")}
</video>
</section>
<section data-component="problem">
<div data-slot="section-title">
<h3>{i18n.t("zen.problem.title")}</h3>
<p>{i18n.t("zen.problem.body")}</p>
</div>
<p>{i18n.t("zen.problem.subtitle")}</p>
<ul>
<li>
<span>[*]</span> {i18n.t("zen.problem.item1")}
</li>
<li>
<span>[*]</span> {i18n.t("zen.problem.item2")}
</li>
<li>
<span>[*]</span> {i18n.t("zen.problem.item3")}
</li>
</ul>
</section>
{/* Three-step "how it works" list with locale-routed doc links. */}
<section data-component="how">
<div data-slot="section-title">
<h3>{i18n.t("zen.how.title")}</h3>
<p>{i18n.t("zen.how.body")}</p>
</div>
<ul>
<li>
<span>[1]</span>
<div>
<strong>{i18n.t("zen.how.step1.title")}</strong> - {i18n.t("zen.how.step1.beforeLink")}{" "}
<a href={language.route("/docs/zen/#how-it-works")} title={i18n.t("zen.how.step1.link")}>
{i18n.t("zen.how.step1.link")}
</a>
</div>
</li>
<li>
<span>[2]</span>
<div>
<strong>{i18n.t("zen.how.step2.title")}</strong> -{" "}
<a href={language.route("/docs/zen/#pricing")}>{i18n.t("zen.how.step2.link")}</a>{" "}
{i18n.t("zen.how.step2.afterLink")}
</div>
</li>
<li>
<span>[3]</span>
<div>
<strong>{i18n.t("zen.how.step3.title")}</strong> - {i18n.t("zen.how.step3.body")}
</div>
</li>
</ul>
</section>
<section data-component="privacy">
<div data-slot="privacy-title">
<h3>{i18n.t("zen.privacy.title")}</h3>
<div>
<span>[*]</span>
<p>
{i18n.t("zen.privacy.beforeExceptions")}{" "}
<a href={language.route("/docs/zen/#privacy")}>{i18n.t("zen.privacy.exceptionsLink")}</a>.
</p>
</div>
</div>
</section>
{/* Testimonial cards; each links to the original post. */}
<section data-component="testimonials">
{/*Dax*/}
<a href="https://x.com/thdxr/status/1973531687629017227">
<div data-slot="testimonial">
<div data-slot="name">
<img src={avatarDax} alt="" />
<strong>Dax Raad</strong>
<span>ex-CEO, Terminal Products</span>
</div>
<div data-slot="quote">
<span>@OpenCode</span>
{" Zen has been life changing, it's truly a no-brainer."}
</div>
</div>
</a>
{/*Jay*/}
<a href="https://x.com/jayair/status/1973530190870618456">
<div data-slot="testimonial">
<div data-slot="name">
<img src={avatarJay} alt="" />
<strong>Jay V</strong>
<span>ex-Founder, SEED, PM, Melt, Pop, Dapt, Cadmus, and ViewPoint</span>
</div>
<div data-slot="quote">
{"4 out of 5 people on our team love using "}
<span>@OpenCode</span>
{" Zen."}
</div>
</div>
</a>
{/*Adam*/}
<a href="https://x.com/adamdotdev/status/1973732040718860563">
<div data-slot="testimonial">
<div data-slot="name">
<img src={avatarAdam} alt="" />
<strong>Adam Elmore</strong>
<span>ex-Hero, AWS</span>
</div>
<div data-slot="quote">
{"I can't recommend "}
<span>@OpenCode</span>
{" Zen enough. Seriously, it's really good."}
</div>
</div>
</a>
{/*David*/}
<a href="https://x.com/iamdavidhill/status/1973530568773214622">
<div data-slot="testimonial">
<div data-slot="name">
<img src={avatarDavid} alt="" />
<strong>David Hill</strong>
<span>ex-Head of Design, Laravel</span>
</div>
<div data-slot="quote">
{"With "}
<span>@OpenCode</span>
{" Zen I know all the models are tested and perfect for coding agents."}
</div>
</div>
</a>
{/*Frank*/}
<a href="https://x.com/fanjiewang/status/1973530092736487756">
<div data-slot="testimonial">
<div data-slot="name">
<img src={avatarFrank} alt="" />
<strong>Frank Wang</strong>
<span>ex-Intern, Nvidia (4 times)</span>
</div>
<div data-slot="quote">I wish I was still at Nvidia.</div>
</div>
</a>
</section>
<section data-component="faq">
<div data-slot="section-title">
<h3>{i18n.t("common.faq")}</h3>
</div>
<ul>
<li>
<Faq question={i18n.t("zen.faq.q1")}>{i18n.t("zen.faq.a1")}</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q2")}>{i18n.t("zen.faq.a2")}</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q3")}>{i18n.t("zen.faq.a3")}</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q4")}>
{i18n.t("zen.faq.a4.p1.beforePricing")}{" "}
<a href={language.route("/docs/zen/#pricing")}>{i18n.t("zen.faq.a4.p1.pricingLink")}</a>{" "}
{i18n.t("zen.faq.a4.p1.afterPricing")} {i18n.t("zen.faq.a4.p2.beforeAccount")}{" "}
<a href="/auth">{i18n.t("zen.faq.a4.p2.accountLink")}</a>. {i18n.t("zen.faq.a4.p3")}
</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q5")}>
{i18n.t("zen.faq.a5.beforeExceptions")}{" "}
<a href={language.route("/docs/zen/#privacy")}>{i18n.t("zen.faq.a5.exceptionsLink")}</a>.
</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q6")}>{i18n.t("zen.faq.a6")}</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q7")}>{i18n.t("zen.faq.a7")}</Faq>
</li>
<li>
<Faq question={i18n.t("zen.faq.q8")}>{i18n.t("zen.faq.a8")}</Faq>
</li>
</ul>
</section>
<EmailSignup />
<Footer />
</div>
</div>
<Legal />
</main>
)
}

View File

@@ -0,0 +1,44 @@
import { Resource, waitUntil } from "@opencode-ai/console-resource"
/**
 * Creates a per-request collector that buffers model name, request, and
 * response text for a single session/request, then writes the accumulated
 * payloads to the ZenDataNew store on flush().
 *
 * Returns undefined (collect nothing) outside the production stage or when
 * sessionId is empty — callers must optional-chain the returned methods.
 */
export function createDataDumper(sessionId: string, requestId: string, projectId: string) {
if (Resource.App.stage !== "production") return
if (sessionId === "") return
const base = { sessionId, requestId, projectId }
const data: Record<string, any> = { ...base }
const metadata: Record<string, any> = { ...base }
return {
provideModel: (model?: string) => {
data.modelName = model
metadata.modelName = model
},
provideRequest: (request: string) => (data.request = request),
provideResponse: (response: string) => (data.response = response),
// Streaming chunks are concatenated onto any response text seen so far.
provideStream: (chunk: string) => (data.response = (data.response ?? "") + chunk),
flush: () => {
// Nothing worth persisting until a model name was recorded.
if (!data.modelName) return
// Digits of the ISO timestamp, e.g. "20260207205446123".
const timestamp = new Date().toISOString().replace(/[^0-9]/g, "")
const year = timestamp.slice(0, 4)
const month = timestamp.slice(4, 6)
const day = timestamp.slice(6, 8)
const hour = timestamp.slice(8, 10)
const minute = timestamp.slice(10, 12)
const second = timestamp.slice(12, 14)
// Full payload, keyed by model and timestamp for time-ordered scans.
waitUntil(
Resource.ZenDataNew.put(
`data/${data.modelName}/${year}/${month}/${day}/${hour}/${minute}/${second}/${requestId}.json`,
JSON.stringify({ timestamp, ...data }),
),
)
// Lightweight metadata, keyed by model and session for direct lookups.
waitUntil(
Resource.ZenDataNew.put(
`meta/${data.modelName}/${sessionId}/${requestId}.json`,
JSON.stringify({ timestamp, ...metadata }),
),
)
},
}
}

View File

@@ -0,0 +1,13 @@
// Error taxonomy for the zen gateway. The handler maps these onto HTTP status
// codes (401 for auth/limit errors, 429 for rate/subscription errors) and
// serializes `constructor.name` into the error payload.
//
// Fix: each subclass now sets `name`, so stack traces and `.name`-based logging
// report the concrete class instead of the inherited default "Error".

/** Missing or invalid API key. */
export class AuthError extends Error {
  override name = "AuthError"
}
/** No payment method or insufficient balance on the workspace. */
export class CreditsError extends Error {
  override name = "CreditsError"
}
/** Workspace-level monthly spending limit reached. */
export class MonthlyLimitError extends Error {
  override name = "MonthlyLimitError"
}
/** Subscription quota exhausted; carries an optional retry hint. */
export class SubscriptionError extends Error {
  override name = "SubscriptionError"
  /** Seconds to wait before retrying; surfaced via the `retry-after` header. */
  retryAfter?: number
  constructor(message: string, retryAfter?: number) {
    super(message)
    this.retryAfter = retryAfter
  }
}
/** Per-user monthly spending limit reached. */
export class UserLimitError extends Error {
  override name = "UserLimitError"
}
/** Unknown/unsupported model or no provider available. */
export class ModelError extends Error {
  override name = "ModelError"
}
/** Request rejected by the gateway rate limiter. */
export class RateLimitError extends Error {
  override name = "RateLimitError"
}

View File

@@ -0,0 +1,784 @@
import type { APIEvent } from "@solidjs/start/server"
import { and, Database, eq, isNull, lt, or, sql } from "@opencode-ai/console-core/drizzle/index.js"
import { KeyTable } from "@opencode-ai/console-core/schema/key.sql.js"
import { BillingTable, SubscriptionTable, UsageTable } from "@opencode-ai/console-core/schema/billing.sql.js"
import { centsToMicroCents } from "@opencode-ai/console-core/util/price.js"
import { getWeekBounds } from "@opencode-ai/console-core/util/date.js"
import { Identifier } from "@opencode-ai/console-core/identifier.js"
import { Billing } from "@opencode-ai/console-core/billing.js"
import { Actor } from "@opencode-ai/console-core/actor.js"
import { WorkspaceTable } from "@opencode-ai/console-core/schema/workspace.sql.js"
import { ZenData } from "@opencode-ai/console-core/model.js"
import { Black, BlackData } from "@opencode-ai/console-core/black.js"
import { UserTable } from "@opencode-ai/console-core/schema/user.sql.js"
import { ModelTable } from "@opencode-ai/console-core/schema/model.sql.js"
import { ProviderTable } from "@opencode-ai/console-core/schema/provider.sql.js"
import { logger } from "./logger"
import {
AuthError,
CreditsError,
MonthlyLimitError,
SubscriptionError,
UserLimitError,
ModelError,
RateLimitError,
} from "./error"
import { createBodyConverter, createStreamPartConverter, createResponseConverter, UsageInfo } from "./provider/provider"
import { anthropicHelper } from "./provider/anthropic"
import { googleHelper } from "./provider/google"
import { openaiHelper } from "./provider/openai"
import { oaCompatHelper } from "./provider/openai-compatible"
import { createRateLimiter } from "./rateLimiter"
import { createDataDumper } from "./dataDumper"
import { createTrialLimiter } from "./trialLimiter"
import { createStickyTracker } from "./stickyProviderTracker"
// Resolved model/provider catalog, as returned by ZenData.list().
type ZenData = Awaited<ReturnType<typeof ZenData.list>>
// State threaded through provider-retry attempts inside the handler.
type RetryOptions = {
  // Provider ids that already failed for this request and must not be re-selected.
  excludeProviders: string[]
  // Attempts so far; at MAX_RETRIES selection falls back to the fallback provider.
  retryCount: number
}
/**
 * Shared LLM gateway handler: authenticates the caller, selects an upstream
 * provider for the requested model, proxies the (streaming or non-streaming)
 * completion, meters token usage/cost, and normalizes errors onto HTTP codes.
 *
 * `opts` adapts the handler to one wire format (anthropic/openai/...): it
 * extracts the API key, model id, and stream flag from the incoming request,
 * and names the format that request/response bodies should be converted to/from.
 */
export async function handler(
  input: APIEvent,
  opts: {
    format: ZenData.Format
    parseApiKey: (headers: Headers) => string | undefined
    parseModel: (url: string, body: any) => string
    parseIsStream: (url: string, body: any) => boolean
  },
) {
  type AuthInfo = Awaited<ReturnType<typeof authenticate>>
  type ModelInfo = Awaited<ReturnType<typeof validateModel>>
  type ProviderInfo = Awaited<ReturnType<typeof selectProvider>>
  const MAX_RETRIES = 3
  // Workspaces that are never billed for usage.
  const FREE_WORKSPACES = [
    "wrk_01K46JDFR0E75SG2Q8K172KF3Y", // frank
    "wrk_01K6W1A3VE0KMNVSCQT43BG2SX", // opencode bench
  ]
  try {
    const url = input.request.url
    const body = await input.request.json()
    const model = opts.parseModel(url, body)
    const isStream = opts.parseIsStream(url, body)
    const ip = input.request.headers.get("x-real-ip") ?? ""
    const sessionId = input.request.headers.get("x-opencode-session") ?? ""
    const requestId = input.request.headers.get("x-opencode-request") ?? ""
    const projectId = input.request.headers.get("x-opencode-project") ?? ""
    const ocClient = input.request.headers.get("x-opencode-client") ?? ""
    logger.metric({
      // NOTE(review): "is_tream" looks like a typo for "is_stream" — confirm
      // no dashboards/queries depend on the misspelled field before renaming.
      is_tream: isStream,
      session: sessionId,
      request: requestId,
      client: ocClient,
    })
    const zenData = ZenData.list()
    const modelInfo = validateModel(zenData, model)
    // All of these factories may return undefined (feature disabled) — hence
    // the optional chaining on every use below.
    const dataDumper = createDataDumper(sessionId, requestId, projectId)
    const trialLimiter = createTrialLimiter(modelInfo.trial, ip, ocClient)
    const isTrial = await trialLimiter?.isTrial()
    const rateLimiter = createRateLimiter(modelInfo.rateLimit, ip, input.request.headers)
    await rateLimiter?.check()
    const stickyTracker = createStickyTracker(modelInfo.stickyProvider, sessionId)
    const stickyProvider = await stickyTracker?.get()
    const authInfo = await authenticate(modelInfo)
    const billingSource = validateBilling(authInfo, modelInfo)
    // Issues the upstream request; on a retriable failure, recurses with the
    // failed provider excluded until the fallback provider is reached.
    const retriableRequest = async (retry: RetryOptions = { excludeProviders: [], retryCount: 0 }) => {
      const providerInfo = selectProvider(
        model,
        zenData,
        authInfo,
        modelInfo,
        sessionId,
        isTrial ?? false,
        retry,
        stickyProvider,
      )
      validateModelSettings(authInfo)
      updateProviderKey(authInfo, providerInfo)
      logger.metric({ provider: providerInfo.id })
      const startTimestamp = Date.now()
      const reqUrl = providerInfo.modifyUrl(providerInfo.api, isStream)
      // Convert the incoming body to the provider's wire format, then let the
      // provider helper apply provider-specific tweaks.
      const reqBody = JSON.stringify(
        providerInfo.modifyBody({
          ...createBodyConverter(opts.format, providerInfo.format)(body),
          model: providerInfo.model,
        }),
      )
      logger.debug("REQUEST URL: " + reqUrl)
      logger.debug("REQUEST: " + reqBody.substring(0, 300) + "...")
      const res = await fetch(reqUrl, {
        method: "POST",
        headers: (() => {
          // Start from the caller's headers, inject provider auth, then strip
          // hop-by-hop and internal opencode headers.
          const headers = new Headers(input.request.headers)
          providerInfo.modifyHeaders(headers, body, providerInfo.apiKey)
          Object.entries(providerInfo.headerMappings ?? {}).forEach(([k, v]) => {
            headers.set(k, headers.get(v)!)
          })
          headers.delete("host")
          headers.delete("content-length")
          headers.delete("x-opencode-request")
          headers.delete("x-opencode-session")
          headers.delete("x-opencode-project")
          headers.delete("x-opencode-client")
          return headers
        })(),
        body: reqBody,
      })
      // Try another provider => stop retrying if using fallback provider
      if (
        res.status !== 200 &&
        // ie. openai 404 error: Item with id 'msg_0ead8b004a3b165d0069436a6b6834819896da85b63b196a3f' not found.
        res.status !== 404 &&
        // ie. cannot change codex model providers mid-session
        modelInfo.stickyProvider !== "strict" &&
        modelInfo.fallbackProvider &&
        providerInfo.id !== modelInfo.fallbackProvider
      ) {
        return retriableRequest({
          excludeProviders: [...retry.excludeProviders, providerInfo.id],
          retryCount: retry.retryCount + 1,
        })
      }
      return { providerInfo, reqBody, res, startTimestamp }
    }
    const { providerInfo, reqBody, res, startTimestamp } = await retriableRequest()
    // Store model request
    dataDumper?.provideModel(providerInfo.storeModel)
    dataDumper?.provideRequest(reqBody)
    // Store sticky provider
    await stickyTracker?.set(providerInfo.id)
    // Temporarily change 404 to 400 status code b/c solid start automatically override 404 response
    const resStatus = res.status === 404 ? 400 : res.status
    // Scrub response headers
    const resHeaders = new Headers()
    const keepHeaders = ["content-type", "cache-control"]
    for (const [k, v] of res.headers.entries()) {
      if (keepHeaders.includes(k.toLowerCase())) {
        resHeaders.set(k, v)
      }
    }
    logger.debug("STATUS: " + res.status + " " + res.statusText)
    // Handle non-streaming response
    if (!isStream) {
      const responseConverter = createResponseConverter(providerInfo.format, opts.format)
      const json = await res.json()
      const body = JSON.stringify(responseConverter(json))
      logger.metric({ response_length: body.length })
      logger.debug("RESPONSE: " + body)
      dataDumper?.provideResponse(body)
      dataDumper?.flush()
      const tokensInfo = providerInfo.normalizeUsage(json.usage)
      await trialLimiter?.track(tokensInfo)
      await rateLimiter?.track()
      const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, billingSource, tokensInfo)
      await reload(authInfo, costInfo)
      return new Response(body, {
        status: resStatus,
        statusText: res.statusText,
        headers: resHeaders,
      })
    }
    // Handle streaming response
    const streamConverter = createStreamPartConverter(providerInfo.format, opts.format)
    const usageParser = providerInfo.createUsageParser()
    // Optional decoder for providers that wrap SSE in a binary envelope (Bedrock).
    const binaryDecoder = providerInfo.createBinaryStreamDecoder()
    const stream = new ReadableStream({
      start(c) {
        const reader = res.body?.getReader()
        const decoder = new TextDecoder()
        const encoder = new TextEncoder()
        let buffer = ""
        let responseLength = 0
        function pump(): Promise<void> {
          return (
            reader?.read().then(async ({ done, value: rawValue }) => {
              if (done) {
                // Upstream finished: emit final metrics, settle usage billing,
                // and close the downstream stream.
                logger.metric({
                  response_length: responseLength,
                  "timestamp.last_byte": Date.now(),
                })
                dataDumper?.flush()
                await rateLimiter?.track()
                const usage = usageParser.retrieve()
                if (usage) {
                  const tokensInfo = providerInfo.normalizeUsage(usage)
                  await trialLimiter?.track(tokensInfo)
                  const costInfo = await trackUsage(authInfo, modelInfo, providerInfo, billingSource, tokensInfo)
                  await reload(authInfo, costInfo)
                }
                c.close()
                return
              }
              if (responseLength === 0) {
                const now = Date.now()
                logger.metric({
                  time_to_first_byte: now - startTimestamp,
                  "timestamp.first_byte": now,
                })
              }
              const value = binaryDecoder ? binaryDecoder(rawValue) : rawValue
              if (!value) return
              responseLength += value.length
              buffer += decoder.decode(value, { stream: true })
              // NOTE(review): this passes the whole accumulated buffer (old
              // leftover + new chunk) to an appending sink, so dumped responses
              // presumably contain duplicated spans — confirm whether the intent
              // was to pass only the newly decoded chunk.
              dataDumper?.provideStream(buffer)
              // Split on the provider's SSE separator; the trailing partial
              // event (if any) is kept in `buffer` for the next read.
              const parts = buffer.split(providerInfo.streamSeparator)
              buffer = parts.pop() ?? ""
              for (let part of parts) {
                logger.debug("PART: " + part)
                part = part.trim()
                usageParser.parse(part)
                if (providerInfo.format !== opts.format) {
                  part = streamConverter(part)
                  c.enqueue(encoder.encode(part + "\n\n"))
                }
              }
              // Same-format passthrough: forward raw bytes untouched.
              if (providerInfo.format === opts.format) {
                c.enqueue(value)
              }
              return pump()
            }) || Promise.resolve()
          )
        }
        return pump()
      },
    })
    return new Response(stream, {
      status: resStatus,
      statusText: res.statusText,
      headers: resHeaders,
    })
  } catch (error: any) {
    logger.metric({
      "error.type": error.constructor.name,
      "error.message": error.message,
    })
    // Note: both top level "type" and "error.type" fields are used by the @ai-sdk/anthropic client to render the error message.
    // Client-correctable errors (auth, credits, limits, bad model) => 401.
    if (
      error instanceof AuthError ||
      error instanceof CreditsError ||
      error instanceof MonthlyLimitError ||
      error instanceof UserLimitError ||
      error instanceof ModelError
    )
      return new Response(
        JSON.stringify({
          type: "error",
          error: { type: error.constructor.name, message: error.message },
        }),
        { status: 401 },
      )
    // Throttling errors => 429, with retry-after when the subscription knows it.
    if (error instanceof RateLimitError || error instanceof SubscriptionError) {
      const headers = new Headers()
      if (error instanceof SubscriptionError && error.retryAfter) {
        headers.set("retry-after", String(error.retryAfter))
      }
      return new Response(
        JSON.stringify({
          type: "error",
          error: { type: error.constructor.name, message: error.message },
        }),
        { status: 429, headers },
      )
    }
    // Anything else is an internal failure.
    return new Response(
      JSON.stringify({
        type: "error",
        error: {
          type: "error",
          message: error.message,
        },
      }),
      { status: 500 },
    )
  }
  // Resolves the requested model id against the zen catalog. A catalog entry
  // may be an array of per-format variants, in which case the one matching the
  // handler's wire format is chosen.
  // Throws ModelError when the model is unknown or has no variant for this format.
  function validateModel(zenData: ZenData, reqModel: string) {
    if (!(reqModel in zenData.models)) throw new ModelError(`Model ${reqModel} not supported`)
    const modelId = reqModel as keyof typeof zenData.models
    const modelData = Array.isArray(zenData.models[modelId])
      ? zenData.models[modelId].find((model) => opts.format === model.formatFilter)
      : zenData.models[modelId]
    if (!modelData) throw new ModelError(`Model ${reqModel} not supported for format ${opts.format}`)
    logger.metric({ model: modelId })
    return { id: modelId, ...modelData }
  }
  // Picks the upstream provider for this request, in priority order:
  //   1. the BYOK provider when the workspace supplied its own credentials,
  //   2. the trial provider for trial traffic,
  //   3. the sticky provider previously recorded for this session,
  //   4. the fallback provider once MAX_RETRIES is reached,
  //   5. otherwise weighted random-ish selection hashed from the session id.
  // Merges the model-level provider entry with the catalog's provider record
  // and the format-specific helper (url/header/body shaping, usage parsing).
  function selectProvider(
    reqModel: string,
    zenData: ZenData,
    authInfo: AuthInfo,
    modelInfo: ModelInfo,
    sessionId: string,
    isTrial: boolean,
    retry: RetryOptions,
    stickyProvider: string | undefined,
  ) {
    const modelProvider = (() => {
      if (authInfo?.provider?.credentials) {
        return modelInfo.providers.find((provider) => provider.id === modelInfo.byokProvider)
      }
      if (isTrial) {
        return modelInfo.providers.find((provider) => provider.id === modelInfo.trial!.provider)
      }
      if (stickyProvider) {
        const provider = modelInfo.providers.find((provider) => provider.id === stickyProvider)
        if (provider) return provider
      }
      if (retry.retryCount === MAX_RETRIES) {
        return modelInfo.providers.find((provider) => provider.id === modelInfo.fallbackProvider)
      }
      // Weighted pool: each provider appears `weight` times (default 1).
      const providers = modelInfo.providers
        .filter((provider) => !provider.disabled)
        .filter((provider) => !retry.excludeProviders.includes(provider.id))
        .flatMap((provider) => Array<typeof provider>(provider.weight ?? 1).fill(provider))
      // Use the last 4 characters of session ID to select a provider
      let h = 0
      const l = sessionId.length
      for (let i = l - 4; i < l; i++) {
        h = (h * 31 + sessionId.charCodeAt(i)) | 0 // 32-bit int
      }
      const index = (h >>> 0) % providers.length // make unsigned + range 0..length-1
      // `index || 0` also catches NaN when the pool is empty; providers[0] is
      // then undefined and the ModelError below fires.
      return providers[index || 0]
    })()
    if (!modelProvider) throw new ModelError("No provider available")
    if (!(modelProvider.id in zenData.providers)) throw new ModelError(`Provider ${modelProvider.id} not supported`)
    return {
      ...modelProvider,
      ...zenData.providers[modelProvider.id],
      ...(() => {
        // Attach the wire-format helper for the selected provider.
        const format = zenData.providers[modelProvider.id].format
        const providerModel = modelProvider.model
        if (format === "anthropic") return anthropicHelper({ reqModel, providerModel })
        if (format === "google") return googleHelper({ reqModel, providerModel })
        if (format === "openai") return openaiHelper({ reqModel, providerModel })
        return oaCompatHelper({ reqModel, providerModel })
      })(),
    }
  }
  // Resolves the API key to a workspace/user/billing context in one joined
  // query. Returns undefined for anonymous access when the model allows it;
  // throws AuthError otherwise.
  async function authenticate(modelInfo: ModelInfo) {
    const apiKey = opts.parseApiKey(input.request.headers)
    if (!apiKey || apiKey === "public") {
      if (modelInfo.allowAnonymous) return
      throw new AuthError("Missing API key.")
    }
    const data = await Database.use((tx) =>
      tx
        .select({
          apiKey: KeyTable.id,
          workspaceID: KeyTable.workspaceID,
          billing: {
            balance: BillingTable.balance,
            paymentMethodID: BillingTable.paymentMethodID,
            monthlyLimit: BillingTable.monthlyLimit,
            monthlyUsage: BillingTable.monthlyUsage,
            timeMonthlyUsageUpdated: BillingTable.timeMonthlyUsageUpdated,
            reloadTrigger: BillingTable.reloadTrigger,
            timeReloadLockedTill: BillingTable.timeReloadLockedTill,
            subscription: BillingTable.subscription,
          },
          user: {
            id: UserTable.id,
            monthlyLimit: UserTable.monthlyLimit,
            monthlyUsage: UserTable.monthlyUsage,
            timeMonthlyUsageUpdated: UserTable.timeMonthlyUsageUpdated,
          },
          subscription: {
            id: SubscriptionTable.id,
            rollingUsage: SubscriptionTable.rollingUsage,
            fixedUsage: SubscriptionTable.fixedUsage,
            timeRollingUpdated: SubscriptionTable.timeRollingUpdated,
            timeFixedUpdated: SubscriptionTable.timeFixedUpdated,
          },
          provider: {
            // Workspace-supplied (BYOK) credentials for the model's byok provider.
            credentials: ProviderTable.credentials,
          },
          // NOTE(review): a ModelTable row's presence appears to mark the model
          // as disabled for this workspace (only timeCreated is read, and
          // isDisabled below is just `!!timeDisabled`) — confirm the intent.
          timeDisabled: ModelTable.timeCreated,
        })
        .from(KeyTable)
        .innerJoin(WorkspaceTable, eq(WorkspaceTable.id, KeyTable.workspaceID))
        .innerJoin(BillingTable, eq(BillingTable.workspaceID, KeyTable.workspaceID))
        .innerJoin(UserTable, and(eq(UserTable.workspaceID, KeyTable.workspaceID), eq(UserTable.id, KeyTable.userID)))
        .leftJoin(ModelTable, and(eq(ModelTable.workspaceID, KeyTable.workspaceID), eq(ModelTable.model, modelInfo.id)))
        .leftJoin(
          ProviderTable,
          // Only join provider credentials when the model supports BYOK.
          modelInfo.byokProvider
            ? and(
                eq(ProviderTable.workspaceID, KeyTable.workspaceID),
                eq(ProviderTable.provider, modelInfo.byokProvider),
              )
            : sql`false`,
        )
        .leftJoin(
          SubscriptionTable,
          and(
            eq(SubscriptionTable.workspaceID, KeyTable.workspaceID),
            eq(SubscriptionTable.userID, KeyTable.userID),
            isNull(SubscriptionTable.timeDeleted),
          ),
        )
        .where(and(eq(KeyTable.key, apiKey), isNull(KeyTable.timeDeleted)))
        .then((rows) => rows[0]),
    )
    if (!data) throw new AuthError("Invalid API key.")
    logger.metric({
      api_key: data.apiKey,
      workspace: data.workspaceID,
      isSubscription: data.subscription ? true : false,
      subscription: data.billing.subscription?.plan,
    })
    return {
      apiKeyId: data.apiKey,
      workspaceID: data.workspaceID,
      billing: data.billing,
      user: data.user,
      subscription: data.subscription,
      provider: data.provider,
      isFree: FREE_WORKSPACES.includes(data.workspaceID),
      isDisabled: !!data.timeDisabled,
    }
  }
  // Determines which billing source covers this request and enforces its
  // limits. Returns one of: "anonymous" | "free" | "subscription" | "balance".
  // Throws SubscriptionError / CreditsError / MonthlyLimitError / UserLimitError
  // when the applicable quota is exhausted.
  function validateBilling(authInfo: AuthInfo, modelInfo: ModelInfo) {
    if (!authInfo) return "anonymous"
    // BYOK credentials, allowlisted workspaces, and anonymous-capable models
    // incur no charges.
    if (authInfo.provider?.credentials) return "free"
    if (authInfo.isFree) return "free"
    if (modelInfo.allowAnonymous) return "free"
    // Validate subscription billing
    if (authInfo.billing.subscription && authInfo.subscription) {
      try {
        const sub = authInfo.subscription
        const plan = authInfo.billing.subscription.plan
        // Renders a human-readable retry delay for the error message.
        const formatRetryTime = (seconds: number) => {
          const days = Math.floor(seconds / 86400)
          if (days >= 1) return `${days} day${days > 1 ? "s" : ""}`
          const hours = Math.floor(seconds / 3600)
          const minutes = Math.ceil((seconds % 3600) / 60)
          if (hours >= 1) return `${hours}hr ${minutes}min`
          return `${minutes}min`
        }
        // Check weekly limit
        if (sub.fixedUsage && sub.timeFixedUpdated) {
          const result = Black.analyzeWeeklyUsage({
            plan,
            usage: sub.fixedUsage,
            timeUpdated: sub.timeFixedUpdated,
          })
          if (result.status === "rate-limited")
            throw new SubscriptionError(
              `Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
              result.resetInSec,
            )
        }
        // Check rolling limit
        if (sub.rollingUsage && sub.timeRollingUpdated) {
          const result = Black.analyzeRollingUsage({
            plan,
            usage: sub.rollingUsage,
            timeUpdated: sub.timeRollingUpdated,
          })
          if (result.status === "rate-limited")
            throw new SubscriptionError(
              `Subscription quota exceeded. Retry in ${formatRetryTime(result.resetInSec)}.`,
              result.resetInSec,
            )
        }
        return "subscription"
      } catch (e) {
        // When the plan allows balance overflow, fall through to
        // pay-as-you-go validation instead of rejecting.
        if (!authInfo.billing.subscription.useBalance) throw e
      }
    }
    // Validate pay as you go billing
    const billing = authInfo.billing
    if (!billing.paymentMethodID)
      throw new CreditsError(
        `No payment method. Add a payment method here: https://opencode.ai/workspace/${authInfo.workspaceID}/billing`,
      )
    if (billing.balance <= 0)
      throw new CreditsError(
        `Insufficient balance. Manage your billing here: https://opencode.ai/workspace/${authInfo.workspaceID}/billing`,
      )
    const now = new Date()
    const currentYear = now.getUTCFullYear()
    const currentMonth = now.getUTCMonth()
    // Monthly limits only apply when the recorded usage is from the current
    // UTC month; stale usage is treated as reset.
    if (
      billing.monthlyLimit &&
      billing.monthlyUsage &&
      billing.timeMonthlyUsageUpdated &&
      billing.monthlyUsage >= centsToMicroCents(billing.monthlyLimit * 100) &&
      currentYear === billing.timeMonthlyUsageUpdated.getUTCFullYear() &&
      currentMonth === billing.timeMonthlyUsageUpdated.getUTCMonth()
    )
      throw new MonthlyLimitError(
        `Your workspace has reached its monthly spending limit of $${billing.monthlyLimit}. Manage your limits here: https://opencode.ai/workspace/${authInfo.workspaceID}/billing`,
      )
    if (
      authInfo.user.monthlyLimit &&
      authInfo.user.monthlyUsage &&
      authInfo.user.timeMonthlyUsageUpdated &&
      authInfo.user.monthlyUsage >= centsToMicroCents(authInfo.user.monthlyLimit * 100) &&
      currentYear === authInfo.user.timeMonthlyUsageUpdated.getUTCFullYear() &&
      currentMonth === authInfo.user.timeMonthlyUsageUpdated.getUTCMonth()
    )
      throw new UserLimitError(
        `You have reached your monthly spending limit of $${authInfo.user.monthlyLimit}. Manage your limits here: https://opencode.ai/workspace/${authInfo.workspaceID}/members`,
      )
    return "balance"
  }
function validateModelSettings(authInfo: AuthInfo) {
if (!authInfo) return
if (authInfo.isDisabled) throw new ModelError("Model is disabled")
}
function updateProviderKey(authInfo: AuthInfo, providerInfo: ProviderInfo) {
if (!authInfo?.provider?.credentials) return
providerInfo.apiKey = authInfo.provider.credentials
}
  // Prices the token usage, emits cost metrics, and persists the usage row
  // plus the relevant counter updates (subscription windows or pay-as-you-go
  // balance/monthly counters) in one batch. Returns the charged cost so the
  // caller can decide whether an auto-reload is needed.
  async function trackUsage(
    authInfo: AuthInfo,
    modelInfo: ModelInfo,
    providerInfo: ProviderInfo,
    billingSource: ReturnType<typeof validateBilling>,
    usageInfo: UsageInfo,
  ) {
    const { inputTokens, outputTokens, reasoningTokens, cacheReadTokens, cacheWrite5mTokens, cacheWrite1hTokens } =
      usageInfo
    // Long-context pricing tier kicks in past 200K prompt-side tokens,
    // when the model defines one.
    const modelCost =
      modelInfo.cost200K &&
      inputTokens + (cacheReadTokens ?? 0) + (cacheWrite5mTokens ?? 0) + (cacheWrite1hTokens ?? 0) > 200_000
        ? modelInfo.cost200K
        : modelInfo.cost
    // Rates times tokens times 100 — costs below are in cents
    // (assumes per-token rates are in dollars — TODO confirm against catalog).
    const inputCost = modelCost.input * inputTokens * 100
    const outputCost = modelCost.output * outputTokens * 100
    // Reasoning tokens are billed at the output rate.
    const reasoningCost = (() => {
      if (!reasoningTokens) return undefined
      return modelCost.output * reasoningTokens * 100
    })()
    const cacheReadCost = (() => {
      if (!cacheReadTokens) return undefined
      if (!modelCost.cacheRead) return undefined
      return modelCost.cacheRead * cacheReadTokens * 100
    })()
    const cacheWrite5mCost = (() => {
      if (!cacheWrite5mTokens) return undefined
      if (!modelCost.cacheWrite5m) return undefined
      return modelCost.cacheWrite5m * cacheWrite5mTokens * 100
    })()
    const cacheWrite1hCost = (() => {
      if (!cacheWrite1hTokens) return undefined
      if (!modelCost.cacheWrite1h) return undefined
      return modelCost.cacheWrite1h * cacheWrite1hTokens * 100
    })()
    const totalCostInCent =
      inputCost +
      outputCost +
      (reasoningCost ?? 0) +
      (cacheReadCost ?? 0) +
      (cacheWrite5mCost ?? 0) +
      (cacheWrite1hCost ?? 0)
    logger.metric({
      "tokens.input": inputTokens,
      "tokens.output": outputTokens,
      "tokens.reasoning": reasoningTokens,
      "tokens.cache_read": cacheReadTokens,
      "tokens.cache_write_5m": cacheWrite5mTokens,
      "tokens.cache_write_1h": cacheWrite1hTokens,
      "cost.input": Math.round(inputCost),
      "cost.output": Math.round(outputCost),
      "cost.reasoning": reasoningCost ? Math.round(reasoningCost) : undefined,
      "cost.cache_read": cacheReadCost ? Math.round(cacheReadCost) : undefined,
      "cost.cache_write_5m": cacheWrite5mCost ? Math.round(cacheWrite5mCost) : undefined,
      "cost.cache_write_1h": cacheWrite1hCost ? Math.round(cacheWrite1hCost) : undefined,
      "cost.total": Math.round(totalCostInCent),
    })
    // Anonymous traffic is metered but never persisted or charged.
    if (billingSource === "anonymous") return
    authInfo = authInfo!
    // BYOK requests are recorded with zero cost.
    const cost = authInfo.provider?.credentials ? 0 : centsToMicroCents(totalCostInCent)
    await Database.use((db) =>
      Promise.all([
        db.insert(UsageTable).values({
          workspaceID: authInfo.workspaceID,
          id: Identifier.create("usage"),
          model: modelInfo.id,
          provider: providerInfo.id,
          inputTokens,
          outputTokens,
          reasoningTokens,
          cacheReadTokens,
          cacheWrite5mTokens,
          cacheWrite1hTokens,
          cost,
          keyID: authInfo.apiKeyId,
          enrichment: billingSource === "subscription" ? { plan: "sub" } : undefined,
        }),
        db
          .update(KeyTable)
          .set({ timeUsed: sql`now()` })
          .where(and(eq(KeyTable.workspaceID, authInfo.workspaceID), eq(KeyTable.id, authInfo.apiKeyId))),
        ...(billingSource === "subscription"
          ? (() => {
              // Subscription accounting: a fixed (calendar-week) window and a
              // rolling window; each counter resets in-SQL when its window
              // has elapsed, otherwise accumulates.
              const plan = authInfo.billing.subscription!.plan
              const black = BlackData.getLimits({ plan })
              const week = getWeekBounds(new Date())
              const rollingWindowSeconds = black.rollingWindow * 3600
              return [
                db
                  .update(SubscriptionTable)
                  .set({
                    fixedUsage: sql`
                      CASE
                        WHEN ${SubscriptionTable.timeFixedUpdated} >= ${week.start} THEN ${SubscriptionTable.fixedUsage} + ${cost}
                        ELSE ${cost}
                      END
                    `,
                    timeFixedUpdated: sql`now()`,
                    rollingUsage: sql`
                      CASE
                        WHEN UNIX_TIMESTAMP(${SubscriptionTable.timeRollingUpdated}) >= UNIX_TIMESTAMP(now()) - ${rollingWindowSeconds} THEN ${SubscriptionTable.rollingUsage} + ${cost}
                        ELSE ${cost}
                      END
                    `,
                    // Keep the rolling anchor while inside the window; restart it otherwise.
                    timeRollingUpdated: sql`
                      CASE
                        WHEN UNIX_TIMESTAMP(${SubscriptionTable.timeRollingUpdated}) >= UNIX_TIMESTAMP(now()) - ${rollingWindowSeconds} THEN ${SubscriptionTable.timeRollingUpdated}
                        ELSE now()
                      END
                    `,
                  })
                  .where(
                    and(
                      eq(SubscriptionTable.workspaceID, authInfo.workspaceID),
                      eq(SubscriptionTable.userID, authInfo.user.id),
                    ),
                  ),
              ]
            })()
          : [
              // Pay-as-you-go: deduct balance (no-op deduction for free
              // workspaces) and bump workspace + user monthly counters,
              // resetting in-SQL at a UTC month boundary.
              db
                .update(BillingTable)
                .set({
                  balance: authInfo.isFree
                    ? sql`${BillingTable.balance} - ${0}`
                    : sql`${BillingTable.balance} - ${cost}`,
                  monthlyUsage: sql`
                    CASE
                      WHEN MONTH(${BillingTable.timeMonthlyUsageUpdated}) = MONTH(now()) AND YEAR(${BillingTable.timeMonthlyUsageUpdated}) = YEAR(now()) THEN ${BillingTable.monthlyUsage} + ${cost}
                      ELSE ${cost}
                    END
                  `,
                  timeMonthlyUsageUpdated: sql`now()`,
                })
                .where(eq(BillingTable.workspaceID, authInfo.workspaceID)),
              db
                .update(UserTable)
                .set({
                  monthlyUsage: sql`
                    CASE
                      WHEN MONTH(${UserTable.timeMonthlyUsageUpdated}) = MONTH(now()) AND YEAR(${UserTable.timeMonthlyUsageUpdated}) = YEAR(now()) THEN ${UserTable.monthlyUsage} + ${cost}
                      ELSE ${cost}
                    END
                  `,
                  timeMonthlyUsageUpdated: sql`now()`,
                })
                .where(and(eq(UserTable.workspaceID, authInfo.workspaceID), eq(UserTable.id, authInfo.user.id))),
            ]),
      ]),
    )
    return { costInMicroCents: cost }
  }
  // Triggers an auto-reload (top-up) when a pay-as-you-go balance drops below
  // the workspace's reload trigger. A one-minute lock column on BillingTable,
  // claimed via a conditional UPDATE, ensures only one concurrent request
  // kicks off the reload.
  async function reload(authInfo: AuthInfo, costInfo: Awaited<ReturnType<typeof trackUsage>>) {
    // Only pay-as-you-go traffic can trigger a reload.
    if (!authInfo) return
    if (authInfo.isFree) return
    if (authInfo.provider?.credentials) return
    if (authInfo.subscription) return
    if (!costInfo) return
    const reloadTrigger = centsToMicroCents((authInfo.billing.reloadTrigger ?? Billing.RELOAD_TRIGGER) * 100)
    // Balance (after this request's cost) still above the trigger: nothing to do.
    if (authInfo.billing.balance - costInfo.costInMicroCents >= reloadTrigger) return
    // Another request holds the reload lock.
    if (authInfo.billing.timeReloadLockedTill && authInfo.billing.timeReloadLockedTill > new Date()) return
    // Claim the lock; rowsAffected === 0 means someone else won, reload is
    // disabled, or the DB-side balance no longer qualifies.
    const lock = await Database.use((tx) =>
      tx
        .update(BillingTable)
        .set({
          timeReloadLockedTill: sql`now() + interval 1 minute`,
        })
        .where(
          and(
            eq(BillingTable.workspaceID, authInfo.workspaceID),
            eq(BillingTable.reload, true),
            lt(BillingTable.balance, reloadTrigger),
            or(isNull(BillingTable.timeReloadLockedTill), lt(BillingTable.timeReloadLockedTill, sql`now()`)),
          ),
        ),
    )
    if (lock.rowsAffected === 0) return
    await Actor.provide("system", { workspaceID: authInfo.workspaceID }, async () => {
      await Billing.reload()
    })
  }
}

View File

@@ -0,0 +1,12 @@
import { Resource } from "@opencode-ai/console-resource"
/**
 * Minimal structured logger for the zen gateway.
 * `metric` emits machine-parseable `_metric:{...}` lines; `debug` is
 * suppressed in production.
 */
export const logger = {
  /** Emit a structured metric record as a single `_metric:`-prefixed line. */
  metric(values: Record<string, any>) {
    console.log(`_metric:${JSON.stringify(values)}`)
  },
  /** Plain passthrough to console.log. */
  log: console.log,
  /** Verbose logging, disabled in production. */
  debug(message: string) {
    if (Resource.App.stage !== "production") console.debug(message)
  },
}

View File

@@ -0,0 +1,752 @@
import { EventStreamCodec } from "@smithy/eventstream-codec"
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
import { fromUtf8, toUtf8 } from "@smithy/util-utf8"
// Subset of Anthropic's usage payload as it appears on message_start /
// message_delta stream events (all fields optional because deltas are partial).
type Usage = {
  // Cache-write token breakdown by TTL bucket.
  cache_creation?: {
    ephemeral_5m_input_tokens?: number
    ephemeral_1h_input_tokens?: number
  }
  // Aggregate cache-write tokens (presumably the pre-breakdown total — not
  // consumed by normalizeUsage below).
  cache_creation_input_tokens?: number
  cache_read_input_tokens?: number
  input_tokens?: number
  output_tokens?: number
  server_tool_use?: {
    web_search_requests?: number
  }
}
// Provider helper for Anthropic-format upstreams: the first-party API and AWS
// Bedrock invoke endpoints (same message schema, but Bedrock wraps the stream
// in an AWS event-stream binary envelope that must be decoded back into SSE).
export const anthropicHelper: ProviderHelper = ({ reqModel, providerModel }) => {
  const isBedrockModelArn = providerModel.startsWith("arn:aws:bedrock:")
  const isBedrockModelID = providerModel.startsWith("global.anthropic.")
  const isBedrock = isBedrockModelArn || isBedrockModelID
  // 1M-context beta is enabled for sonnet and opus-4-6 family models.
  const supports1m = reqModel.includes("sonnet") || reqModel.includes("opus-4-6")
  return {
    format: "anthropic",
    // Bedrock uses per-model invoke endpoints (stream flag lives in the path);
    // the first-party API uses /messages.
    modifyUrl: (providerApi: string, isStream?: boolean) =>
      isBedrock
        ? `${providerApi}/model/${isBedrockModelArn ? encodeURIComponent(providerModel) : providerModel}/${isStream ? "invoke-with-response-stream" : "invoke"}`
        : providerApi + "/messages",
    modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
      if (isBedrock) {
        headers.set("Authorization", `Bearer ${apiKey}`)
      } else {
        headers.set("x-api-key", apiKey)
        headers.set("anthropic-version", headers.get("anthropic-version") ?? "2023-06-01")
        if (supports1m) {
          headers.set("anthropic-beta", "context-1m-2025-08-07")
        }
      }
    },
    modifyBody: (body: Record<string, any>) => ({
      ...body,
      ...(isBedrock
        ? {
            // Bedrock carries the model in the URL and the stream flag in the
            // endpoint name, so both are stripped from the body.
            anthropic_version: "bedrock-2023-05-31",
            anthropic_beta: supports1m ? "context-1m-2025-08-07" : undefined,
            model: undefined,
            stream: undefined,
          }
        : {
            service_tier: "standard_only",
          }),
    }),
    // Unwraps Bedrock's AWS event-stream framing back into Anthropic-style SSE
    // text. Returns undefined for non-Bedrock providers (no decoding needed).
    createBinaryStreamDecoder: () => {
      if (!isBedrock) return undefined
      const decoder = new TextDecoder()
      const encoder = new TextEncoder()
      const codec = new EventStreamCodec(toUtf8, fromUtf8)
      let buffer = new Uint8Array(0)
      return (value: Uint8Array) => {
        // Accumulate raw bytes; frames may span multiple network chunks.
        const newBuffer = new Uint8Array(buffer.length + value.length)
        newBuffer.set(buffer)
        newBuffer.set(value, buffer.length)
        buffer = newBuffer
        const messages = []
        while (buffer.length >= 4) {
          // first 4 bytes are the total length (big-endian)
          const totalLength = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getUint32(0, false)
          // wait for more chunks
          if (buffer.length < totalLength) break
          try {
            const subView = buffer.subarray(0, totalLength)
            const decoded = codec.decode(subView)
            buffer = buffer.slice(totalLength)
            /* Example of Bedrock data
            ```
            {
            bytes: 'eyJ0eXBlIjoibWVzc2FnZV9zdGFydCIsIm1lc3NhZ2UiOnsibW9kZWwiOiJjbGF1ZGUtb3B1cy00LTUtMjAyNTExMDEiLCJpZCI6Im1zZ19iZHJrXzAxMjVGdHRGb2lkNGlwWmZ4SzZMbktxeCIsInR5cGUiOiJtZXNzYWdlIiwicm9sZSI6ImFzc2lzdGFudCIsImNvbnRlbnQiOltdLCJzdG9wX3JlYXNvbiI6bnVsbCwic3RvcF9zZXF1ZW5jZSI6bnVsbCwidXNhZ2UiOnsiaW5wdXRfdG9rZW5zIjo0LCJjYWNoZV9jcmVhdGlvbl9pbnB1dF90b2tlbnMiOjEsImNhY2hlX3JlYWRfaW5wdXRfdG9rZW5zIjoxMTk2MywiY2FjaGVfY3JlYXRpb24iOnsiZXBoZW1lcmFsXzVtX2lucHV0X3Rva2VucyI6MSwiZXBoZW1lcmFsXzFoX2lucHV0X3Rva2VucyI6MH0sIm91dHB1dF90b2tlbnMiOjF9fX0=',
            p: '...'
            }
            ```
            Decoded bytes
            ```
            {
            type: 'message_start',
            message: {
            model: 'claude-opus-4-5-20251101',
            id: 'msg_bdrk_0125FttFoid4ipZfxK6LnKqx',
            type: 'message',
            role: 'assistant',
            content: [],
            stop_reason: null,
            stop_sequence: null,
            usage: {
            input_tokens: 4,
            cache_creation_input_tokens: 1,
            cache_read_input_tokens: 11963,
            cache_creation: [Object],
            output_tokens: 1
            }
            }
            }
            ```
            */
            /* Example of Anthropic data
            ```
            event: message_delta
            data: {"type":"message_start","message":{"model":"claude-opus-4-5-20251101","id":"msg_01ETvwVWSKULxzPdkQ1xAnk2","type":"message","role":"assistant","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":3,"cache_creation_input_tokens":11543,"cache_read_input_tokens":0,"cache_creation":{"ephemeral_5m_input_tokens":11543,"ephemeral_1h_input_tokens":0},"output_tokens":1,"service_tier":"standard"}}}
            ```
            */
            if (decoded.headers[":message-type"]?.value === "event") {
              // Frame body is JSON with a base64 `bytes` payload; decode it and
              // re-emit as an SSE event named after the payload's `type` field.
              // NOTE(review): `decoder` is shared between this one-shot decode
              // and the { stream: true } decode above — confirm no multi-byte
              // state can leak between the two uses.
              const data = decoder.decode(decoded.body, { stream: true })
              const parsedDataResult = JSON.parse(data)
              delete parsedDataResult.p
              const binary = atob(parsedDataResult.bytes)
              const uint8 = Uint8Array.from(binary, (c) => c.charCodeAt(0))
              const bytes = decoder.decode(uint8)
              const eventName = JSON.parse(bytes).type
              messages.push([`event: ${eventName}`, "\n", `data: ${bytes}`, "\n\n"].join(""))
            }
          } catch (e) {
            // NOTE(review): leftover debug output below; also, breaking without
            // consuming the frame means a permanently undecodable frame is
            // retried on every subsequent chunk — confirm intended behavior.
            console.log("@@@EE@@@")
            console.log(e)
            break
          }
        }
        return encoder.encode(messages.join(""))
      }
    },
    streamSeparator: "\n\n",
    // Accumulates usage across stream events; Anthropic reports input tokens on
    // message_start and output tokens on later deltas, so updates are merged.
    createUsageParser: () => {
      let usage: Usage
      return {
        parse: (chunk: string) => {
          // Expects "event: ...\ndata: {...}" pairs.
          // NOTE(review): a chunk without a second line makes `data` undefined
          // and `.startsWith` throw — confirm all upstream parts have one.
          const data = chunk.split("\n")[1]
          if (!data.startsWith("data: ")) return
          let json
          try {
            json = JSON.parse(data.slice(6))
          } catch (e) {
            return
          }
          const usageUpdate = json.usage ?? json.message?.usage
          if (!usageUpdate) return
          usage = {
            ...usage,
            ...usageUpdate,
            cache_creation: {
              ...usage?.cache_creation,
              ...usageUpdate.cache_creation,
            },
            server_tool_use: {
              ...usage?.server_tool_use,
              ...usageUpdate.server_tool_use,
            },
          }
        },
        retrieve: () => usage,
      }
    },
    // Maps Anthropic usage fields onto the gateway's provider-neutral shape.
    normalizeUsage: (usage: Usage) => ({
      inputTokens: usage.input_tokens ?? 0,
      outputTokens: usage.output_tokens ?? 0,
      reasoningTokens: undefined,
      cacheReadTokens: usage.cache_read_input_tokens ?? undefined,
      cacheWrite5mTokens: usage.cache_creation?.ephemeral_5m_input_tokens ?? undefined,
      cacheWrite1hTokens: usage.cache_creation?.ephemeral_1h_input_tokens ?? undefined,
    }),
  }
}
/**
 * Converts an Anthropic Messages API request into the provider-neutral
 * CommonRequest shape (OpenAI chat-completions style). Non-object inputs are
 * returned unchanged.
 *
 * - Anthropic `system` text blocks become individual `role: "system"` messages.
 * - User `text`/`image` parts map to OpenAI text/image_url parts; `tool_result`
 *   parts are emitted as standalone `role: "tool"` messages.
 * - Assistant `text` parts are concatenated; `tool_use` parts become OpenAI
 *   `tool_calls` with stringified JSON arguments.
 * - Tools carrying `input_schema` map to `{type:"function", function:{...}}`.
 * - tool_choice: "auto" -> "auto", "any" -> "required", "tool" -> named function.
 * - A single-element `stop_sequences` collapses to a plain string.
 */
export function fromAnthropicRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  const msgs: any[] = []
  const sys = Array.isArray(body.system) ? body.system : undefined
  if (sys && sys.length > 0) {
    for (const s of sys) {
      // Only non-empty text system blocks are carried over.
      if (!s) continue
      if ((s as any).type !== "text") continue
      if (typeof (s as any).text !== "string") continue
      if ((s as any).text.length === 0) continue
      msgs.push({ role: "system", content: (s as any).text })
    }
  }
  // Maps an Anthropic image `source` (url or base64) to an OpenAI image_url part.
  const toImg = (src: any) => {
    if (!src || typeof src !== "object") return undefined
    if ((src as any).type === "url" && typeof (src as any).url === "string")
      return { type: "image_url", image_url: { url: (src as any).url } }
    if (
      (src as any).type === "base64" &&
      typeof (src as any).media_type === "string" &&
      typeof (src as any).data === "string"
    )
      return {
        type: "image_url",
        image_url: { url: `data:${(src as any).media_type};base64,${(src as any).data}` },
      }
    return undefined
  }
  const inMsgs = Array.isArray(body.messages) ? body.messages : []
  for (const m of inMsgs) {
    if (!m || !(m as any).role) continue
    if ((m as any).role === "user") {
      const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
      const partsOut: any[] = []
      for (const p of partsIn) {
        if (!p || !(p as any).type) continue
        if ((p as any).type === "text" && typeof (p as any).text === "string")
          partsOut.push({ type: "text", text: (p as any).text })
        if ((p as any).type === "image") {
          const ip = toImg((p as any).source)
          if (ip) partsOut.push(ip)
        }
        if ((p as any).type === "tool_result") {
          // Tool results become separate tool messages. NOTE(review): they are
          // pushed while scanning, so they precede the user message assembled
          // from the remaining parts of this same Anthropic message — confirm
          // downstream consumers accept that ordering.
          const id = (p as any).tool_use_id
          const content =
            typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
          msgs.push({ role: "tool", tool_call_id: id, content })
        }
      }
      if (partsOut.length > 0) {
        // A lone text part collapses to a plain string message.
        if (partsOut.length === 1 && partsOut[0].type === "text") msgs.push({ role: "user", content: partsOut[0].text })
        else msgs.push({ role: "user", content: partsOut })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const partsIn = Array.isArray((m as any).content) ? (m as any).content : []
      const texts: string[] = []
      const tcs: any[] = []
      for (const p of partsIn) {
        if (!p || !(p as any).type) continue
        if ((p as any).type === "text" && typeof (p as any).text === "string") texts.push((p as any).text)
        if ((p as any).type === "tool_use") {
          const name = (p as any).name
          const id = (p as any).id
          const inp = (p as any).input
          // OpenAI tool_call arguments must be a JSON string.
          const input = (() => {
            if (typeof inp === "string") return inp
            try {
              return JSON.stringify(inp ?? {})
            } catch {
              return String(inp ?? "")
            }
          })()
          tcs.push({ id, type: "function", function: { name, arguments: input } })
        }
      }
      const out: any = { role: "assistant", content: texts.join("") }
      if (tcs.length > 0) out.tool_calls = tcs
      msgs.push(out)
      continue
    }
  }
  // Only tools in Anthropic shape (carrying `input_schema`) are converted.
  const tools = Array.isArray(body.tools)
    ? body.tools
        .filter((t: any) => t && typeof t === "object" && "input_schema" in t)
        .map((t: any) => ({
          type: "function",
          function: {
            name: (t as any).name,
            description: (t as any).description,
            parameters: (t as any).input_schema,
          },
        }))
    : undefined
  const tcin = body.tool_choice
  const tc = (() => {
    if (!tcin) return undefined
    if ((tcin as any).type === "auto") return "auto"
    if ((tcin as any).type === "any") return "required"
    if ((tcin as any).type === "tool" && typeof (tcin as any).name === "string")
      return { type: "function" as const, function: { name: (tcin as any).name } }
    return undefined
  })()
  const stop = (() => {
    const v = body.stop_sequences
    if (!v) return undefined
    if (Array.isArray(v)) return v.length === 1 ? v[0] : v
    if (typeof v === "string") return v
    return undefined
  })()
  return {
    model: body.model,
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop,
    messages: msgs,
    stream: !!body.stream,
    tools,
    tool_choice: tc,
  }
}
/**
 * Converts a provider-neutral CommonRequest (OpenAI chat style) into an
 * Anthropic Messages API request body. Non-object inputs are returned
 * unchanged.
 *
 * - `role: "system"` messages are lifted into the top-level `system` array.
 * - OpenAI image_url parts (including data: URLs) map back to Anthropic
 *   `image` blocks with url/base64 sources.
 * - Assistant `tool_calls` become `tool_use` blocks; `role: "tool"` messages
 *   become user messages holding a `tool_result` block.
 * - `max_tokens` defaults to 32_000 when the caller did not set one.
 */
export function toAnthropicRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const sysIn = Array.isArray(body.messages) ? body.messages.filter((m: any) => m && m.role === "system") : []
  // Dispenses `cache_control: ephemeral` markers for at most the first 4
  // emitted blocks, in emission order (system, then messages, then tools).
  // NOTE(review): Anthropic caps cache breakpoints per request — confirm that
  // front-loading the 4 markers (rather than marking stable-prefix ends) is
  // the intended caching strategy; tools are emitted last and rarely get one.
  let ccCount = 0
  const cc = () => {
    ccCount++
    return ccCount <= 4 ? { cache_control: { type: "ephemeral" } } : {}
  }
  const system = sysIn
    .filter((m: any) => typeof m.content === "string" && m.content.length > 0)
    .map((m: any) => ({ type: "text", text: m.content, ...cc() }))
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const msgsOut: any[] = []
  // Maps an OpenAI image_url part to an Anthropic image source; data: URLs
  // become base64 sources, other URL strings become url sources.
  const toSrc = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "image_url" && (p as any).image_url) {
      const u = (p as any).image_url.url ?? (p as any).image_url
      if (typeof u === "string" && u.startsWith("data:")) {
        const m = u.match(/^data:([^;]+);base64,(.*)$/)
        if (m) return { type: "base64", media_type: m[1], data: m[2] }
      }
      if (typeof u === "string") return { type: "url", url: u }
    }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !(m as any).role) continue
    if ((m as any).role === "user") {
      if (typeof (m as any).content === "string") {
        msgsOut.push({
          role: "user",
          content: [{ type: "text", text: (m as any).content, ...cc() }],
        })
      } else if (Array.isArray((m as any).content)) {
        const parts: any[] = []
        for (const p of (m as any).content) {
          if (!p || !(p as any).type) continue
          if ((p as any).type === "text" && typeof (p as any).text === "string")
            parts.push({ type: "text", text: (p as any).text, ...cc() })
          if ((p as any).type === "image_url") {
            const s = toSrc(p)
            if (s) parts.push({ type: "image", source: s, ...cc() })
          }
        }
        if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const out: any = { role: "assistant", content: [] as any[] }
      if (typeof (m as any).content === "string" && (m as any).content.length > 0) {
        ;(out.content as any[]).push({ type: "text", text: (m as any).content, ...cc() })
      }
      if (Array.isArray((m as any).tool_calls)) {
        for (const tc of (m as any).tool_calls) {
          if ((tc as any).type === "function" && (tc as any).function) {
            // Anthropic expects parsed JSON input; fall back to the raw string
            // when the arguments do not parse.
            let input: any
            const a = (tc as any).function.arguments
            if (typeof a === "string") {
              try {
                input = JSON.parse(a)
              } catch {
                input = a
              }
            } else input = a
            // Synthesize an id when the OpenAI tool call did not carry one.
            const id = (tc as any).id || `toolu_${Math.random().toString(36).slice(2)}`
            ;(out.content as any[]).push({
              type: "tool_use",
              id,
              name: (tc as any).function.name,
              input,
              ...cc(),
            })
          }
        }
      }
      // Assistant messages with neither text nor tool calls are dropped.
      if ((out.content as any[]).length > 0) msgsOut.push(out)
      continue
    }
    if ((m as any).role === "tool") {
      // Tool results travel back to Anthropic as user messages holding a
      // tool_result block.
      msgsOut.push({
        role: "user",
        content: [
          {
            type: "tool_result",
            tool_use_id: (m as any).tool_call_id,
            content: (m as any).content,
            ...cc(),
          },
        ],
      })
      continue
    }
  }
  const tools = Array.isArray(body.tools)
    ? body.tools
        .filter((t: any) => t && typeof t === "object" && (t as any).type === "function")
        .map((t: any) => ({
          name: (t as any).function.name,
          description: (t as any).function.description,
          input_schema: (t as any).function.parameters,
          ...cc(),
        }))
    : undefined
  const tcIn = body.tool_choice
  const tool_choice = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return { type: "auto" }
    if (tcIn === "required") return { type: "any" }
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "tool", name: (tcIn as any).function.name }
    return undefined
  })()
  const stop_sequences = (() => {
    const v = body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v
    if (typeof v === "string") return [v]
    return undefined
  })()
  // NOTE(review): `model` is intentionally absent from the returned body —
  // presumably supplied elsewhere (URL or caller); verify against the
  // anthropic helper's modifyUrl/modifyBody.
  return {
    max_tokens: body.max_tokens ?? 32_000,
    temperature: body.temperature,
    top_p: body.top_p,
    system: system.length > 0 ? system : undefined,
    messages: msgsOut,
    stream: !!body.stream,
    tools,
    tool_choice,
    stop_sequences,
  }
}
/**
 * Converts an Anthropic Messages API response into an OpenAI-style
 * chat.completion (CommonResponse). Inputs that are not Anthropic message
 * objects — non-objects, or bodies already carrying a `choices` array — are
 * passed through untouched.
 */
export function fromAnthropicResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  if (Array.isArray((resp as any).choices)) return resp
  if ((resp as any).type !== "message") return resp
  // msg_… ids become chatcmpl_…; anything else gets a fresh random id.
  const rawId = (resp as any).id
  const id =
    typeof rawId === "string" ? rawId.replace(/^msg_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
  const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []
  // Concatenate all text blocks into one assistant message; convert tool_use
  // blocks into OpenAI tool_calls with stringified JSON arguments.
  let text = ""
  const toolCalls: any[] = []
  for (const block of blocks) {
    if (!block) continue
    if (block.type === "text" && typeof block.text === "string") {
      text += block.text
      continue
    }
    if (block.type !== "tool_use") continue
    let args: string
    const input = block.input
    if (typeof input === "string") {
      args = input
    } else {
      try {
        args = JSON.stringify(input ?? {})
      } catch {
        args = String(input ?? "")
      }
    }
    const callId =
      typeof block.id === "string" && block.id.length > 0 ? block.id : `toolu_${Math.random().toString(36).slice(2)}`
    toolCalls.push({ id: callId, type: "function" as const, function: { name: block.name, arguments: args } })
  }
  // Map Anthropic stop reasons onto OpenAI finish reasons.
  const finishReason = (() => {
    switch ((resp as any).stop_reason ?? null) {
      case "end_turn":
        return "stop"
      case "tool_use":
        return "tool_calls"
      case "max_tokens":
        return "length"
      case "content_filter":
        return "content_filter"
      default:
        return null
    }
  })()
  const rawUsage = (resp as any).usage
  let usage: any
  if (rawUsage) {
    const promptTokens = typeof rawUsage.input_tokens === "number" ? rawUsage.input_tokens : undefined
    const completionTokens = typeof rawUsage.output_tokens === "number" ? rawUsage.output_tokens : undefined
    const cachedTokens =
      typeof rawUsage.cache_read_input_tokens === "number" ? rawUsage.cache_read_input_tokens : undefined
    usage = {
      prompt_tokens: promptTokens,
      completion_tokens: completionTokens,
      total_tokens: promptTokens != null && completionTokens != null ? promptTokens + completionTokens : undefined,
      ...(cachedTokens != null ? { prompt_tokens_details: { cached_tokens: cachedTokens } } : {}),
    }
  }
  return {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model: (resp as any).model,
    choices: [
      {
        index: 0,
        message: {
          role: "assistant",
          ...(text.length > 0 ? { content: text } : {}),
          ...(toolCalls.length > 0 ? { tool_calls: toolCalls } : {}),
        },
        finish_reason: finishReason,
      },
    ],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Converts an OpenAI-style chat.completion (CommonResponse) into an Anthropic
 * Messages API response body. Non-object inputs and bodies without a usable
 * first choice/message are passed through untouched.
 */
export function toAnthropicResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const firstChoice = (resp as any).choices[0]
  if (!firstChoice) return resp
  const msg = firstChoice.message
  if (!msg) return resp
  const blocks: any[] = []
  if (typeof msg.content === "string" && msg.content.length > 0) {
    blocks.push({ type: "text", text: msg.content })
  }
  for (const call of Array.isArray(msg.tool_calls) ? msg.tool_calls : []) {
    if ((call as any).type !== "function" || !(call as any).function) continue
    // Tool arguments arrive as a JSON string; fall back to the raw value when
    // it does not parse.
    let input: any
    try {
      input = JSON.parse((call as any).function.arguments)
    } catch {
      input = (call as any).function.arguments
    }
    blocks.push({
      type: "tool_use",
      id: (call as any).id,
      name: (call as any).function.name,
      input,
    })
  }
  // Map OpenAI finish reasons onto Anthropic stop reasons.
  const stop_reason = (() => {
    switch (firstChoice.finish_reason) {
      case "stop":
        return "end_turn"
      case "tool_calls":
        return "tool_use"
      case "length":
        return "max_tokens"
      case "content_filter":
        return "content_filter"
      default:
        return null
    }
  })()
  const u = (resp as any).usage
  const usage = u
    ? {
        input_tokens: u.prompt_tokens,
        output_tokens: u.completion_tokens,
        cache_read_input_tokens: u.prompt_tokens_details?.cached_tokens,
      }
    : undefined
  return {
    id: (resp as any).id,
    type: "message",
    role: "assistant",
    // Anthropic responses always carry at least one content block.
    content: blocks.length > 0 ? blocks : [{ type: "text", text: "" }],
    model: (resp as any).model,
    stop_reason,
    usage,
  }
}
/**
 * Converts one Anthropic SSE event ("event: <type>\ndata: <json>") into an
 * OpenAI-style CommonChunk. Events without a parseable data line are returned
 * unchanged as the raw string.
 *
 * - content_block_start: text -> empty assistant delta; tool_use -> tool_call
 *   start carrying id/name and empty arguments.
 * - content_block_delta: text_delta -> content delta; input_json_delta ->
 *   tool_call arguments fragment.
 * - message_delta: stop_reason mapped to an OpenAI finish_reason.
 */
export function fromAnthropicChunk(chunk: string): CommonChunk | string {
  // Anthropic sends two lines per part: "event: <type>\n" + "data: <json>"
  const lines = chunk.split("\n")
  const dataLine = lines.find((l) => l.startsWith("data: "))
  if (!dataLine) return chunk
  let json
  try {
    json = JSON.parse(dataLine.slice(6))
  } catch {
    return chunk
  }
  const out: CommonChunk = {
    id: json.id ?? json.message?.id ?? "",
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: json.model ?? json.message?.model ?? "",
    choices: [],
  }
  if (json.type === "content_block_start") {
    const cb = json.content_block
    if (cb?.type === "text") {
      out.choices.push({
        index: json.index ?? 0,
        delta: { role: "assistant", content: "" },
        finish_reason: null,
      })
    } else if (cb?.type === "tool_use") {
      out.choices.push({
        index: json.index ?? 0,
        delta: {
          tool_calls: [
            {
              index: json.index ?? 0,
              id: cb.id,
              type: "function",
              function: { name: cb.name, arguments: "" },
            },
          ],
        },
        finish_reason: null,
      })
    }
  }
  if (json.type === "content_block_delta") {
    const d = json.delta
    if (d?.type === "text_delta") {
      out.choices.push({ index: json.index ?? 0, delta: { content: d.text }, finish_reason: null })
    } else if (d?.type === "input_json_delta") {
      out.choices.push({
        index: json.index ?? 0,
        delta: {
          tool_calls: [{ index: json.index ?? 0, function: { arguments: d.partial_json } }],
        },
        finish_reason: null,
      })
    }
  }
  if (json.type === "message_delta") {
    const d = json.delta
    // Map Anthropic stop reasons onto OpenAI finish reasons.
    const finish_reason = (() => {
      const r = d?.stop_reason
      if (r === "end_turn") return "stop"
      if (r === "tool_use") return "tool_calls"
      if (r === "max_tokens") return "length"
      if (r === "content_filter") return "content_filter"
      return null
    })()
    out.choices.push({ index: 0, delta: {}, finish_reason })
  }
  // Top-level usage is present on message_delta events.
  // NOTE(review): message_start events nest usage under json.message.usage,
  // which this does not read — confirm input-token usage isn't needed here.
  if (json.usage) {
    const u = json.usage
    out.usage = {
      prompt_tokens: u.input_tokens,
      completion_tokens: u.output_tokens,
      total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
      ...(u.cache_read_input_tokens ? { prompt_tokens_details: { cached_tokens: u.cache_read_input_tokens } } : {}),
    }
  }
  return out
}
/**
 * Converts an OpenAI-style CommonChunk into a single Anthropic stream-event
 * JSON string. Only the first choice is considered; chunks without choices
 * or a delta serialize to "{}".
 *
 * NOTE(review): `result.type`/`result.delta` are assigned by each section in
 * turn, so a chunk carrying both content and a finish_reason (or several
 * tool_calls) keeps only the last event written — confirm upstream chunks
 * always carry one kind of payload at a time.
 * NOTE(review): the return value has no "data: " prefix or "event:" line,
 * unlike toOaCompatibleChunk — confirm the caller adds SSE framing.
 */
export function toAnthropicChunk(chunk: CommonChunk): string {
  if (!chunk.choices || !Array.isArray(chunk.choices) || chunk.choices.length === 0) {
    return JSON.stringify({})
  }
  const choice = chunk.choices[0]
  const delta = choice.delta
  if (!delta) return JSON.stringify({})
  const result: any = {}
  if (delta.content) {
    result.type = "content_block_delta"
    result.index = 0
    result.delta = { type: "text_delta", text: delta.content }
  }
  if (delta.tool_calls) {
    for (const tc of delta.tool_calls) {
      // A named function starts a tool_use block; bare arguments extend it.
      if (tc.function?.name) {
        result.type = "content_block_start"
        result.index = tc.index ?? 0
        result.content_block = { type: "tool_use", id: tc.id, name: tc.function.name, input: {} }
      } else if (tc.function?.arguments) {
        result.type = "content_block_delta"
        result.index = tc.index ?? 0
        result.delta = { type: "input_json_delta", partial_json: tc.function.arguments }
      }
    }
  }
  if (choice.finish_reason) {
    // Map OpenAI finish reasons onto Anthropic stop reasons.
    const stop_reason = (() => {
      const r = choice.finish_reason
      if (r === "stop") return "end_turn"
      if (r === "tool_calls") return "tool_use"
      if (r === "length") return "max_tokens"
      if (r === "content_filter") return "content_filter"
      return null
    })()
    result.type = "message_delta"
    result.delta = { stop_reason, stop_sequence: null }
  }
  if (chunk.usage) {
    const u = chunk.usage
    result.usage = {
      input_tokens: u.prompt_tokens,
      output_tokens: u.completion_tokens,
      cache_read_input_tokens: u.prompt_tokens_details?.cached_tokens,
    }
  }
  return JSON.stringify(result)
}

View File

@@ -0,0 +1,75 @@
import { ProviderHelper } from "./provider"
/*
{
promptTokenCount: 11453,
candidatesTokenCount: 71,
totalTokenCount: 11625,
cachedContentTokenCount: 8100,
promptTokensDetails: [
{modality: "TEXT",tokenCount: 11453}
],
cacheTokensDetails: [
{modality: "TEXT",tokenCount: 8100}
],
thoughtsTokenCount: 101
}
*/
// Shape of Gemini's `usageMetadata` (see sample payload above).
type Usage = {
  promptTokenCount?: number // prompt tokens (normalizeUsage treats this as including cached tokens)
  candidatesTokenCount?: number // generated output tokens
  totalTokenCount?: number
  cachedContentTokenCount?: number // tokens read from the context cache
  promptTokensDetails?: { modality: string; tokenCount: number }[]
  cacheTokensDetails?: { modality: string; tokenCount: number }[]
  thoughtsTokenCount?: number // reasoning ("thinking") tokens
}
/**
 * Provider helper for the Google Gemini generateContent API.
 * Builds model-specific URLs (SSE variant for streaming), authenticates via
 * the x-goog-api-key header, and extracts/normalizes `usageMetadata`.
 */
export const googleHelper: ProviderHelper = ({ providerModel }) => {
  return {
    format: "google",
    modifyUrl: (providerApi: string, isStream?: boolean) => {
      const method = isStream ? "streamGenerateContent?alt=sse" : "generateContent"
      return `${providerApi}/models/${providerModel}:${method}`
    },
    modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
      headers.set("x-goog-api-key", apiKey)
    },
    // Request body passes through untouched.
    modifyBody: (body: Record<string, any>) => body,
    createBinaryStreamDecoder: () => undefined,
    streamSeparator: "\r\n\r\n",
    createUsageParser: () => {
      // Keeps the most recent usageMetadata seen on any data line.
      let latest: Usage
      return {
        parse: (chunk: string) => {
          if (!chunk.startsWith("data: ")) return
          let parsed: { usageMetadata?: Usage }
          try {
            parsed = JSON.parse(chunk.slice(6))
          } catch {
            return
          }
          if (parsed.usageMetadata) latest = parsed.usageMetadata
        },
        retrieve: () => latest,
      }
    },
    normalizeUsage: (usage: Usage) => {
      const cached = usage.cachedContentTokenCount ?? 0
      return {
        // promptTokenCount includes cached tokens; report only the uncached part.
        inputTokens: (usage.promptTokenCount ?? 0) - cached,
        outputTokens: usage.candidatesTokenCount ?? 0,
        reasoningTokens: usage.thoughtsTokenCount ?? 0,
        cacheReadTokens: cached,
        cacheWrite5mTokens: undefined,
        cacheWrite1hTokens: undefined,
      }
    },
  }
}

View File

@@ -0,0 +1,547 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
// Usage block of an OpenAI-compatible chat.completion(.chunk) response.
// Vendors report cached tokens in different places (see field comments).
type Usage = {
  prompt_tokens?: number
  completion_tokens?: number
  total_tokens?: number
  // used by moonshot
  cached_tokens?: number
  // used by xai
  prompt_tokens_details?: {
    text_tokens?: number
    audio_tokens?: number
    image_tokens?: number
    cached_tokens?: number
  }
  completion_tokens_details?: {
    reasoning_tokens?: number
    audio_tokens?: number
    accepted_prediction_tokens?: number
    rejected_prediction_tokens?: number
  }
}
/**
 * Provider helper for OpenAI-compatible chat-completions endpoints.
 * Adds bearer auth plus a session-affinity header, asks for usage reporting
 * on streamed responses, and extracts/normalizes token usage.
 */
export const oaCompatHelper: ProviderHelper = () => {
  return {
    format: "oa-compat",
    modifyUrl: (providerApi: string) => `${providerApi}/chat/completions`,
    modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
      headers.set("authorization", `Bearer ${apiKey}`)
      // Pin all requests of one opencode session to the same backend.
      headers.set("x-session-affinity", headers.get("x-opencode-session") ?? "")
    },
    modifyBody: (body: Record<string, any>) => {
      if (!body.stream) return { ...body }
      // Streaming: request a trailing usage chunk from the provider.
      return { ...body, stream_options: { include_usage: true } }
    },
    createBinaryStreamDecoder: () => undefined,
    streamSeparator: "\n\n",
    createUsageParser: () => {
      // Keeps the most recent usage object seen on any data line.
      let latest: Usage
      return {
        parse: (chunk: string) => {
          if (!chunk.startsWith("data: ")) return
          let parsed: { usage?: Usage }
          try {
            parsed = JSON.parse(chunk.slice(6))
          } catch {
            return
          }
          if (parsed.usage) latest = parsed.usage
        },
        retrieve: () => latest,
      }
    },
    normalizeUsage: (usage: Usage) => {
      // Cached tokens live at the top level (moonshot) or inside
      // prompt_tokens_details (xai).
      const cached = usage.cached_tokens ?? usage.prompt_tokens_details?.cached_tokens ?? undefined
      return {
        // prompt_tokens includes cached tokens; report only the uncached part.
        inputTokens: (usage.prompt_tokens ?? 0) - (cached ?? 0),
        outputTokens: usage.completion_tokens ?? 0,
        reasoningTokens: usage.completion_tokens_details?.reasoning_tokens ?? undefined,
        cacheReadTokens: cached,
        cacheWrite5mTokens: undefined,
        cacheWrite1hTokens: undefined,
      }
    },
  }
}
/**
 * Normalizes an OpenAI-compatible chat-completions request into the
 * provider-neutral CommonRequest shape. Non-object inputs pass through.
 * Single-text-part user content collapses to a plain string; empty system
 * messages and unrecognized roles/parts are dropped.
 */
export function fromOaCompatibleRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  const source = Array.isArray(body.messages) ? body.messages : []
  const normalized: any[] = []
  for (const msg of source) {
    if (!msg || !msg.role) continue
    switch (msg.role) {
      case "system": {
        if (typeof msg.content === "string" && msg.content.length > 0)
          normalized.push({ role: "system", content: msg.content })
        break
      }
      case "user": {
        if (typeof msg.content === "string") {
          normalized.push({ role: "user", content: msg.content })
          break
        }
        if (Array.isArray(msg.content)) {
          const collected: any[] = []
          for (const part of msg.content) {
            if (!part || !part.type) continue
            if (part.type === "text" && typeof part.text === "string")
              collected.push({ type: "text", text: part.text })
            if (part.type === "image_url") collected.push({ type: "image_url", image_url: part.image_url })
          }
          if (collected.length === 1 && collected[0].type === "text")
            normalized.push({ role: "user", content: collected[0].text })
          else if (collected.length > 0) normalized.push({ role: "user", content: collected })
        }
        break
      }
      case "assistant": {
        const entry: any = { role: "assistant" }
        if (typeof msg.content === "string") entry.content = msg.content
        if (Array.isArray(msg.tool_calls)) entry.tool_calls = msg.tool_calls
        normalized.push(entry)
        break
      }
      case "tool": {
        normalized.push({ role: "tool", tool_call_id: msg.tool_call_id, content: msg.content })
        break
      }
    }
  }
  return {
    model: body.model,
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop: body.stop,
    messages: normalized,
    stream: !!body.stream,
    tools: Array.isArray(body.tools) ? body.tools : undefined,
    tool_choice: body.tool_choice,
  }
}
/**
 * Converts a provider-neutral CommonRequest into an OpenAI-compatible
 * chat-completions request body. Non-object inputs pass through.
 *
 * - Anthropic-style image `source` blocks (url / base64) are converted to
 *   OpenAI image_url parts; existing image_url parts pass through.
 * - Single-text-part user content collapses to a plain string.
 * - Tools are re-emitted in OpenAI `{type:"function", function:{...}}` shape.
 */
export function toOaCompatibleRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const msgsOut: any[] = []
  // Normalizes an image part: OpenAI image_url parts pass through; Anthropic
  // `source` blocks become image_url parts (base64 -> data URL).
  const toImg = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if (p.type === "image_url" && p.image_url) return { type: "image_url", image_url: p.image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if (s.type === "url" && typeof s.url === "string") return { type: "image_url", image_url: { url: s.url } }
    if (s.type === "base64" && typeof s.media_type === "string" && typeof s.data === "string")
      return { type: "image_url", image_url: { url: `data:${s.media_type};base64,${s.data}` } }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !m.role) continue
    if (m.role === "system") {
      // Drop empty system messages.
      if (typeof m.content === "string" && m.content.length > 0) msgsOut.push({ role: "system", content: m.content })
      continue
    }
    if (m.role === "user") {
      if (typeof m.content === "string") {
        msgsOut.push({ role: "user", content: m.content })
        continue
      }
      if (Array.isArray(m.content)) {
        const parts: any[] = []
        for (const p of m.content) {
          if (!p || !p.type) continue
          if (p.type === "text" && typeof p.text === "string") parts.push({ type: "text", text: p.text })
          const ip = toImg(p)
          if (ip) parts.push(ip)
        }
        if (parts.length === 1 && parts[0].type === "text") msgsOut.push({ role: "user", content: parts[0].text })
        else if (parts.length > 0) msgsOut.push({ role: "user", content: parts })
      }
      continue
    }
    if (m.role === "assistant") {
      const out: any = { role: "assistant" }
      if (typeof m.content === "string") out.content = m.content
      if (Array.isArray(m.tool_calls)) out.tool_calls = m.tool_calls
      msgsOut.push(out)
      continue
    }
    if (m.role === "tool") {
      msgsOut.push({ role: "tool", tool_call_id: m.tool_call_id, content: m.content })
      continue
    }
  }
  // CommonRequest tools are OpenAI-shaped ({type:"function", function:{...}};
  // see fromAnthropicRequest / toAnthropicRequest). The previous code read
  // name/description/parameters off the tool itself, which emitted functions
  // with undefined names — accept both the nested and the flat shape.
  const tools = Array.isArray(body.tools)
    ? body.tools.map((tool: any) => {
        const fn = tool && typeof tool === "object" && tool.function ? tool.function : tool
        return {
          type: "function",
          function: {
            name: fn?.name,
            description: fn?.description,
            parameters: fn?.parameters,
          },
        }
      })
    : undefined
  return {
    model: body.model,
    max_tokens: body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop: body.stop,
    messages: msgsOut,
    stream: !!body.stream,
    tools,
    tool_choice: body.tool_choice,
    response_format: (body as any).response_format,
  }
}
/**
 * Normalizes an OpenAI-compatible chat.completion response into the
 * CommonResponse shape. The body is re-emitted from scratch: tool-call
 * arguments are parsed and re-serialized, vendor extras are dropped, and
 * only cached-token details are preserved from usage. Bodies without a
 * first choice/message pass through untouched.
 */
export function fromOaCompatibleResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const choice = (resp as any).choices[0]
  if (!choice) return resp
  const message = choice.message
  if (!message) return resp
  // Intermediate block list (Anthropic-ish) that is flattened back into an
  // OpenAI message below — this round-trip normalizes the payload.
  const content: any[] = []
  if (typeof message.content === "string" && message.content.length > 0) {
    content.push({ type: "text", text: message.content })
  }
  if (Array.isArray(message.tool_calls)) {
    for (const toolCall of message.tool_calls) {
      if (toolCall.type === "function" && toolCall.function) {
        // Parse arguments when possible; keep the raw string otherwise.
        let input
        try {
          input = JSON.parse(toolCall.function.arguments)
        } catch {
          input = toolCall.function.arguments
        }
        content.push({
          type: "tool_use",
          id: toolCall.id,
          name: toolCall.function.name,
          input,
        })
      }
    }
  }
  // Identity map: passes through the four known reasons, nulls anything else.
  const stopReason = (() => {
    const reason = choice.finish_reason
    if (reason === "stop") return "stop"
    if (reason === "tool_calls") return "tool_calls"
    if (reason === "length") return "length"
    if (reason === "content_filter") return "content_filter"
    return null
  })()
  const usage = (() => {
    const u = (resp as any).usage
    if (!u) return undefined
    return {
      prompt_tokens: u.prompt_tokens,
      completion_tokens: u.completion_tokens,
      total_tokens: u.total_tokens,
      ...(u.prompt_tokens_details?.cached_tokens
        ? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
        : {}),
    }
  })()
  return {
    id: (resp as any).id,
    object: "chat.completion" as const,
    created: Math.floor(Date.now() / 1000),
    model: (resp as any).model,
    choices: [
      {
        index: 0,
        message: {
          role: "assistant" as const,
          ...(content.length > 0 && content.some((c) => c.type === "text")
            ? {
                content: content
                  .filter((c) => c.type === "text")
                  .map((c: any) => c.text)
                  .join(""),
              }
            : {}),
          ...(content.length > 0 && content.some((c) => c.type === "tool_use")
            ? {
                tool_calls: content
                  .filter((c) => c.type === "tool_use")
                  .map((c: any) => ({
                    id: c.id,
                    type: "function" as const,
                    function: {
                      name: c.name,
                      // Re-serialize parsed arguments back to a JSON string.
                      arguments: typeof c.input === "string" ? c.input : JSON.stringify(c.input),
                    },
                  })),
              }
            : {}),
        },
        finish_reason: stopReason,
      },
    ],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Converts an Anthropic Messages API response (`type: "message"`) into an
 * OpenAI-compatible chat.completion. Inputs already carrying a `choices`
 * array, or that are not Anthropic messages, pass through untouched.
 */
export function toOaCompatibleResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (Array.isArray((resp as any).choices)) return resp
  const isAnthropic = typeof (resp as any).type === "string" && (resp as any).type === "message"
  if (!isAnthropic) return resp
  // msg_… ids become chatcmpl_…; anything else gets a fresh random id.
  const idIn = (resp as any).id
  const id =
    typeof idIn === "string" ? idIn.replace(/^msg_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
  const model = (resp as any).model
  const blocks: any[] = Array.isArray((resp as any).content) ? (resp as any).content : []
  // All text blocks concatenate into a single assistant message.
  const text = blocks
    .filter((b) => b && b.type === "text" && typeof b.text === "string")
    .map((b) => b.text)
    .join("")
  // tool_use blocks become OpenAI tool_calls with JSON-string arguments.
  const tcs = blocks
    .filter((b) => b && b.type === "tool_use")
    .map((b) => {
      const name = (b as any).name
      const args = (() => {
        const inp = (b as any).input
        if (typeof inp === "string") return inp
        try {
          return JSON.stringify(inp ?? {})
        } catch {
          return String(inp ?? "")
        }
      })()
      // Synthesize an id when the block did not carry a usable one.
      const tid =
        typeof (b as any).id === "string" && (b as any).id.length > 0
          ? (b as any).id
          : `toolu_${Math.random().toString(36).slice(2)}`
      return { id: tid, type: "function" as const, function: { name, arguments: args } }
    })
  // Map Anthropic stop reasons onto OpenAI finish reasons.
  const finish = (r: string | null) => {
    if (r === "end_turn") return "stop"
    if (r === "tool_use") return "tool_calls"
    if (r === "max_tokens") return "length"
    if (r === "content_filter") return "content_filter"
    return null
  }
  const u = (resp as any).usage
  const usage = (() => {
    if (!u) return undefined as any
    const pt = typeof u.input_tokens === "number" ? u.input_tokens : undefined
    const ct = typeof u.output_tokens === "number" ? u.output_tokens : undefined
    const total = pt != null && ct != null ? pt + ct : undefined
    const cached = typeof u.cache_read_input_tokens === "number" ? u.cache_read_input_tokens : undefined
    const details = cached != null ? { cached_tokens: cached } : undefined
    return {
      prompt_tokens: pt,
      completion_tokens: ct,
      total_tokens: total,
      ...(details ? { prompt_tokens_details: details } : {}),
    }
  })()
  return {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [
      {
        index: 0,
        message: {
          role: "assistant",
          ...(text && text.length > 0 ? { content: text } : {}),
          ...(tcs.length > 0 ? { tool_calls: tcs } : {}),
        },
        finish_reason: finish((resp as any).stop_reason ?? null),
      },
    ],
    ...(usage ? { usage } : {}),
  }
}
/**
 * Converts one OpenAI-compatible SSE line ("data: {...}") into a CommonChunk.
 * Lines that are not data lines, fail to parse, or carry no usable
 * choice/delta are returned unchanged as the raw string.
 */
export function fromOaCompatibleChunk(chunk: string): CommonChunk | string {
  if (!chunk.startsWith("data: ")) return chunk
  let payload
  try {
    payload = JSON.parse(chunk.slice(6))
  } catch {
    return chunk
  }
  if (!payload.choices || !Array.isArray(payload.choices) || payload.choices.length === 0) {
    return chunk
  }
  const first = payload.choices[0]
  const delta = first.delta
  if (!delta) return chunk
  const out: CommonChunk = {
    id: payload.id ?? "",
    object: "chat.completion.chunk",
    created: payload.created ?? Math.floor(Date.now() / 1000),
    model: payload.model ?? "",
    choices: [],
  }
  // Text content, tool-call fragments and the finish marker become separate
  // choice entries, in that order.
  if (delta.content) {
    out.choices.push({ index: first.index ?? 0, delta: { content: delta.content }, finish_reason: null })
  }
  if (delta.tool_calls) {
    for (const call of delta.tool_calls) {
      out.choices.push({
        index: first.index ?? 0,
        delta: {
          tool_calls: [
            {
              index: call.index ?? 0,
              id: call.id,
              type: call.type ?? "function",
              function: call.function,
            },
          ],
        },
        finish_reason: null,
      })
    }
  }
  if (first.finish_reason) {
    out.choices.push({ index: first.index ?? 0, delta: {}, finish_reason: first.finish_reason })
  }
  if (payload.usage) {
    const u = payload.usage
    out.usage = {
      prompt_tokens: u.prompt_tokens,
      completion_tokens: u.completion_tokens,
      total_tokens: u.total_tokens,
      ...(u.prompt_tokens_details?.cached_tokens
        ? { prompt_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
        : {}),
    }
  }
  return out
}
/**
 * Serializes a CommonChunk back into an OpenAI-compatible SSE data line.
 * Role, content, tool-call and finish information from the first choice are
 * emitted as separate choice entries, mirroring fromOaCompatibleChunk.
 */
export function toOaCompatibleChunk(chunk: CommonChunk): string {
  const payload: any = {
    id: chunk.id,
    object: "chat.completion.chunk",
    created: chunk.created,
    model: chunk.model,
    choices: [],
  }
  const first = chunk.choices && chunk.choices.length > 0 ? chunk.choices[0] : undefined
  if (!first) {
    return `data: ${JSON.stringify(payload)}`
  }
  const delta = first.delta
  const emit = (d: any, finish: any) => payload.choices.push({ index: first.index, delta: d, finish_reason: finish })
  if (delta?.role) emit({ role: delta.role }, null)
  if (delta?.content) emit({ content: delta.content }, null)
  if (delta?.tool_calls) {
    for (const call of delta.tool_calls) {
      emit(
        {
          tool_calls: [
            {
              index: call.index,
              id: call.id,
              type: call.type,
              function: call.function,
            },
          ],
        },
        null,
      )
    }
  }
  if (first.finish_reason) emit({}, first.finish_reason)
  if (chunk.usage) {
    payload.usage = {
      prompt_tokens: chunk.usage.prompt_tokens,
      completion_tokens: chunk.usage.completion_tokens,
      total_tokens: chunk.usage.total_tokens,
      ...(chunk.usage.prompt_tokens_details?.cached_tokens
        ? {
            prompt_tokens_details: {
              cached_tokens: chunk.usage.prompt_tokens_details.cached_tokens,
            },
          }
        : {}),
    }
  }
  return `data: ${JSON.stringify(payload)}`
}

View File

@@ -0,0 +1,630 @@
import { ProviderHelper, CommonRequest, CommonResponse, CommonChunk } from "./provider"
// Usage object of an OpenAI Responses API `response.completed` event.
type Usage = {
  input_tokens?: number // prompt tokens (normalizeUsage treats this as including cached tokens)
  input_tokens_details?: {
    cached_tokens?: number // tokens served from the prompt cache
  }
  output_tokens?: number // output tokens (normalizeUsage treats this as including reasoning tokens)
  output_tokens_details?: {
    reasoning_tokens?: number
  }
  total_tokens?: number
}
/**
 * Provider helper for the OpenAI Responses API (`/responses`).
 * Sets bearer auth and extracts/normalizes token usage from the final
 * `response.completed` SSE event.
 */
export const openaiHelper: ProviderHelper = () => ({
  format: "openai",
  modifyUrl: (providerApi: string) => providerApi + "/responses",
  modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
    headers.set("authorization", `Bearer ${apiKey}`)
  },
  // Request body passes through untouched.
  modifyBody: (body: Record<string, any>) => {
    return body
  },
  createBinaryStreamDecoder: () => undefined,
  streamSeparator: "\n\n",
  createUsageParser: () => {
    let usage: Usage
    return {
      parse: (chunk: string) => {
        // Usage is only reported on the `response.completed` event, which
        // arrives as "event: <type>\ndata: <json>".
        const [event, data] = chunk.split("\n")
        if (event !== "event: response.completed") return
        // Guard against a missing data line: `data` is undefined when the
        // event line arrives alone, and `data.startsWith` used to throw.
        if (!data || !data.startsWith("data: ")) return
        let json
        try {
          json = JSON.parse(data.slice(6)) as { response?: { usage?: Usage } }
        } catch (e) {
          return
        }
        if (!json.response?.usage) return
        usage = json.response.usage
      },
      retrieve: () => usage,
    }
  },
  normalizeUsage: (usage: Usage) => {
    const inputTokens = usage.input_tokens ?? 0
    const outputTokens = usage.output_tokens ?? 0
    const reasoningTokens = usage.output_tokens_details?.reasoning_tokens ?? undefined
    const cacheReadTokens = usage.input_tokens_details?.cached_tokens ?? undefined
    return {
      // input_tokens includes cached tokens and output_tokens includes
      // reasoning tokens — report the exclusive counts alongside the details.
      inputTokens: inputTokens - (cacheReadTokens ?? 0),
      outputTokens: outputTokens - (reasoningTokens ?? 0),
      reasoningTokens,
      cacheReadTokens,
      cacheWrite5mTokens: undefined,
      cacheWrite1hTokens: undefined,
    }
  },
})
/**
 * Converts an OpenAI-style request body (Responses API `input` items or Chat
 * Completions `messages`) into the internal CommonRequest shape.
 *
 * Non-object bodies are passed through untouched so upstream error handling
 * can deal with them.
 */
export function fromOpenaiRequest(body: any): CommonRequest {
  if (!body || typeof body !== "object") return body
  // Normalizes the various image encodings (chat `image_url`, Responses
  // `input_image`, source url/base64) to a chat-style `image_url` part.
  // Returns undefined for anything unrecognized.
  const toImg = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "image_url" && (p as any).image_url)
      return { type: "image_url", image_url: (p as any).image_url }
    if ((p as any).type === "input_image" && (p as any).image_url)
      return { type: "image_url", image_url: (p as any).image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if ((s as any).type === "url" && typeof (s as any).url === "string")
      return { type: "image_url", image_url: { url: (s as any).url } }
    if (
      (s as any).type === "base64" &&
      typeof (s as any).media_type === "string" &&
      typeof (s as any).data === "string"
    )
      return {
        type: "image_url",
        image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` },
      }
    return undefined
  }
  const msgs: any[] = []
  const inMsgs = Array.isArray(body.input) ? body.input : Array.isArray(body.messages) ? body.messages : []
  for (const m of inMsgs) {
    if (!m) continue
    // Responses API items without role:
    if (!(m as any).role && (m as any).type) {
      if ((m as any).type === "function_call") {
        const name = (m as any).name
        const a = (m as any).arguments
        const args = typeof a === "string" ? a : JSON.stringify(a ?? {})
        msgs.push({
          role: "assistant",
          // Prefer call_id over the output-item id: function_call_output
          // items reference the call via call_id, and the tool message below
          // uses that same value as tool_call_id, so the pair must match.
          tool_calls: [
            {
              id: (m as any).call_id ?? (m as any).id,
              type: "function",
              function: { name, arguments: args },
            },
          ],
        })
      }
      if ((m as any).type === "function_call_output") {
        const id = (m as any).call_id
        const out = (m as any).output
        const content = typeof out === "string" ? out : JSON.stringify(out)
        msgs.push({ role: "tool", tool_call_id: id, content })
      }
      continue
    }
    // system/developer both map to system; array content collapses to its
    // first text part.
    if ((m as any).role === "system" || (m as any).role === "developer") {
      const c = (m as any).content
      if (typeof c === "string" && c.length > 0) msgs.push({ role: "system", content: c })
      if (Array.isArray(c)) {
        const t = c.find((p: any) => p && typeof p.text === "string")
        if (t && typeof t.text === "string" && t.text.length > 0) msgs.push({ role: "system", content: t.text })
      }
      continue
    }
    if ((m as any).role === "user") {
      const c = (m as any).content
      if (typeof c === "string") {
        msgs.push({ role: "user", content: c })
      } else if (Array.isArray(c)) {
        const parts: any[] = []
        for (const p of c) {
          if (!p || !(p as any).type) continue
          if (((p as any).type === "text" || (p as any).type === "input_text") && typeof (p as any).text === "string")
            parts.push({ type: "text", text: (p as any).text })
          const ip = toImg(p)
          if (ip) parts.push(ip)
          // tool_result parts become standalone tool messages, not user parts.
          if ((p as any).type === "tool_result") {
            const id = (p as any).tool_call_id
            const content =
              typeof (p as any).content === "string" ? (p as any).content : JSON.stringify((p as any).content)
            msgs.push({ role: "tool", tool_call_id: id, content })
          }
        }
        // A lone text part collapses to a plain string message.
        if (parts.length === 1 && parts[0].type === "text") msgs.push({ role: "user", content: parts[0].text })
        else if (parts.length > 0) msgs.push({ role: "user", content: parts })
      }
      continue
    }
    if ((m as any).role === "assistant") {
      const c = (m as any).content
      const out: any = { role: "assistant" }
      if (typeof c === "string" && c.length > 0) out.content = c
      if (Array.isArray((m as any).tool_calls)) out.tool_calls = (m as any).tool_calls
      msgs.push(out)
      continue
    }
    if ((m as any).role === "tool") {
      msgs.push({
        role: "tool",
        tool_call_id: (m as any).tool_call_id,
        content: (m as any).content,
      })
      continue
    }
  }
  // Normalize tool_choice: only "auto", "required", and a named function are
  // representable; anything else is dropped.
  const tcIn = body.tool_choice
  const tc = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return "auto"
    if (tcIn === "required") return "required"
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "function" as const, function: { name: (tcIn as any).function.name } }
    return undefined
  })()
  // Accept both anthropic-style stop_sequences and openai-style stop; a
  // single-element array collapses to a bare string.
  const stop = (() => {
    const v = body.stop_sequences ?? body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v.length === 1 ? v[0] : v
    if (typeof v === "string") return v
    return undefined
  })()
  return {
    model: body.model,
    max_tokens: body.max_output_tokens ?? body.max_tokens,
    temperature: body.temperature,
    top_p: body.top_p,
    stop,
    messages: msgs,
    stream: !!body.stream,
    tools: Array.isArray(body.tools) ? body.tools : undefined,
    tool_choice: tc,
  }
}
/**
 * Converts a CommonRequest (chat-completions shaped) into an OpenAI
 * Responses API payload (`input` items, `max_output_tokens`,
 * `stop_sequences`, flattened tool definitions).
 *
 * Non-object bodies are passed through untouched.
 */
export function toOpenaiRequest(body: CommonRequest) {
  if (!body || typeof body !== "object") return body
  const msgsIn = Array.isArray(body.messages) ? body.messages : []
  const input: any[] = []
  // Maps a chat content part to a Responses input part (input_text /
  // input_image); url/base64 sources become input_image, base64 inlined as a
  // data: URL. Unknown parts map to undefined and are dropped by the caller.
  const toPart = (p: any) => {
    if (!p || typeof p !== "object") return undefined
    if ((p as any).type === "text" && typeof (p as any).text === "string")
      return { type: "input_text", text: (p as any).text }
    if ((p as any).type === "image_url" && (p as any).image_url)
      return { type: "input_image", image_url: (p as any).image_url }
    const s = (p as any).source
    if (!s || typeof s !== "object") return undefined
    if ((s as any).type === "url" && typeof (s as any).url === "string")
      return { type: "input_image", image_url: { url: (s as any).url } }
    if (
      (s as any).type === "base64" &&
      typeof (s as any).media_type === "string" &&
      typeof (s as any).data === "string"
    )
      return {
        type: "input_image",
        image_url: { url: `data:${(s as any).media_type};base64,${(s as any).data}` },
      }
    return undefined
  }
  for (const m of msgsIn) {
    if (!m || !(m as any).role) continue
    // System messages pass through only when content is a plain string.
    if ((m as any).role === "system") {
      const c = (m as any).content
      if (typeof c === "string") input.push({ role: "system", content: c })
      continue
    }
    if ((m as any).role === "user") {
      const c = (m as any).content
      if (typeof c === "string") {
        input.push({ role: "user", content: [{ type: "input_text", text: c }] })
      } else if (Array.isArray(c)) {
        const parts: any[] = []
        for (const p of c) {
          const op = toPart(p)
          if (op) parts.push(op)
        }
        if (parts.length > 0) input.push({ role: "user", content: parts })
      }
      continue
    }
    // Assistant turns can emit both an output_text message and one
    // function_call item per tool call.
    if ((m as any).role === "assistant") {
      const c = (m as any).content
      if (typeof c === "string" && c.length > 0) {
        input.push({ role: "assistant", content: [{ type: "output_text", text: c }] })
      }
      if (Array.isArray((m as any).tool_calls)) {
        for (const tc of (m as any).tool_calls) {
          if ((tc as any).type === "function" && (tc as any).function) {
            const name = (tc as any).function.name
            const a = (tc as any).function.arguments
            const args = typeof a === "string" ? a : JSON.stringify(a)
            input.push({ type: "function_call", call_id: (tc as any).id, name, arguments: args })
          }
        }
      }
      continue
    }
    // Tool results become function_call_output items keyed by call_id.
    if ((m as any).role === "tool") {
      const out = typeof (m as any).content === "string" ? (m as any).content : JSON.stringify((m as any).content)
      input.push({ type: "function_call_output", call_id: (m as any).tool_call_id, output: out })
      continue
    }
  }
  // Responses API always takes an array of stop sequences.
  const stop_sequences = (() => {
    const v = body.stop
    if (!v) return undefined
    if (Array.isArray(v)) return v
    if (typeof v === "string") return [v]
    return undefined
  })()
  const tcIn = body.tool_choice
  const tool_choice = (() => {
    if (!tcIn) return undefined
    if (tcIn === "auto") return "auto"
    if (tcIn === "required") return "required"
    if ((tcIn as any).type === "function" && (tcIn as any).function?.name)
      return { type: "function", function: { name: (tcIn as any).function.name } }
    return undefined
  })()
  // Chat tools nest the definition under `function`; Responses tools are flat.
  const tools = (() => {
    if (!Array.isArray(body.tools)) return undefined
    return body.tools.map((tool: any) => {
      if (tool.type === "function") {
        return {
          type: "function",
          name: tool.function?.name,
          description: tool.function?.description,
          parameters: tool.function?.parameters,
          strict: tool.function?.strict,
        }
      }
      return tool
    })
  })()
  // NOTE(review): body.temperature is not forwarded here — confirm this is
  // deliberate (e.g. reasoning models rejecting the parameter).
  return {
    model: body.model,
    input,
    max_output_tokens: body.max_tokens,
    top_p: body.top_p,
    stop_sequences,
    stream: !!body.stream,
    tools,
    tool_choice,
    include: Array.isArray((body as any).include) ? (body as any).include : undefined,
    truncation: (body as any).truncation,
    metadata: (body as any).metadata,
    store: (body as any).store,
    user: (body as any).user,
    // Hard-coded gateway tuning for output verbosity and reasoning effort.
    text: { verbosity: body.model === "gpt-5-codex" ? "medium" : "low" },
    reasoning: { effort: "medium" },
  }
}
/**
 * Converts an OpenAI Responses API result into chat.completion shape.
 * Responses that already look chat-completion shaped (have a `choices`
 * array) or are not objects are returned untouched.
 */
export function fromOpenaiResponse(resp: any): CommonResponse {
  if (!resp || typeof resp !== "object") return resp
  // Already chat-completion shaped; nothing to translate.
  if (Array.isArray((resp as any).choices)) return resp
  const inner = (resp as any).response ?? resp
  if (!inner || typeof inner !== "object") return resp
  const rawId = (inner as any).id
  // resp_* ids become chatcmpl_*; synthesize one when the id is absent.
  const id =
    typeof rawId === "string" ? rawId.replace(/^resp_/, "chatcmpl_") : `chatcmpl_${Math.random().toString(36).slice(2)}`
  const model = (inner as any).model ?? (resp as any).model
  const items: any[] = Array.isArray((inner as any).output) ? (inner as any).output : []
  // Single pass over output items: collect assistant text and tool calls.
  let text = ""
  const toolCalls: any[] = []
  for (const item of items) {
    if (!item) continue
    if (item.type === "message" && Array.isArray(item.content)) {
      for (const part of item.content) {
        if (part && part.type === "output_text" && typeof part.text === "string") text += part.text
      }
    }
    if (item.type === "function_call") {
      const argsRaw = item.arguments
      const callId =
        typeof item.id === "string" && item.id.length > 0 ? item.id : `toolu_${Math.random().toString(36).slice(2)}`
      toolCalls.push({
        id: callId,
        type: "function" as const,
        function: { name: item.name, arguments: typeof argsRaw === "string" ? argsRaw : JSON.stringify(argsRaw ?? {}) },
      })
    }
  }
  // Responses stop_reason → chat finish_reason vocabulary.
  const mapFinish = (reason: string | null) => {
    switch (reason) {
      case "stop":
        return "stop"
      case "tool_call":
      case "tool_calls":
        return "tool_calls"
      case "length":
      case "max_output_tokens":
        return "length"
      case "content_filter":
        return "content_filter"
      default:
        return null
    }
  }
  const usageIn = (inner as any).usage ?? (resp as any).usage
  let usage: any
  if (usageIn) {
    const promptTokens = typeof usageIn.input_tokens === "number" ? usageIn.input_tokens : undefined
    const completionTokens = typeof usageIn.output_tokens === "number" ? usageIn.output_tokens : undefined
    usage = {
      prompt_tokens: promptTokens,
      completion_tokens: completionTokens,
      total_tokens: promptTokens != null && completionTokens != null ? promptTokens + completionTokens : undefined,
    }
    const cachedTokens = usageIn.input_tokens_details?.cached_tokens
    if (typeof cachedTokens === "number") usage.prompt_tokens_details = { cached_tokens: cachedTokens }
  }
  const message: any = { role: "assistant" }
  if (text.length > 0) message.content = text
  if (toolCalls.length > 0) message.tool_calls = toolCalls
  const result: any = {
    id,
    object: "chat.completion",
    created: Math.floor(Date.now() / 1000),
    model,
    choices: [{ index: 0, message, finish_reason: mapFinish((inner as any).stop_reason ?? null) }],
  }
  if (usage) result.usage = usage
  return result
}
/**
 * Converts a chat.completion response into the OpenAI Responses API shape.
 * Inputs without a choices array, or with an empty/messageless first choice,
 * are returned untouched.
 */
export function toOpenaiResponse(resp: CommonResponse) {
  if (!resp || typeof resp !== "object") return resp
  if (!Array.isArray((resp as any).choices)) return resp
  const first = (resp as any).choices[0]
  if (!first) return resp
  const message = first.message
  if (!message) return resp
  const output: any[] = []
  // Assistant text becomes a completed "message" output item.
  if (typeof message.content === "string" && message.content.length > 0) {
    output.push({
      id: `msg_${Math.random().toString(36).slice(2)}`,
      type: "message",
      status: "completed",
      role: "assistant",
      content: [{ type: "output_text", text: message.content, annotations: [], logprobs: [] }],
    })
  }
  // Each function tool call becomes its own "function_call" output item.
  if (Array.isArray(message.tool_calls)) {
    for (const call of message.tool_calls) {
      if ((call as any).type !== "function" || !(call as any).function) continue
      output.push({
        id: (call as any).id,
        type: "function_call",
        name: (call as any).function.name,
        call_id: (call as any).id,
        arguments: (call as any).function.arguments,
      })
    }
  }
  // chat finish_reason → Responses stop_reason vocabulary.
  let stop_reason: string | null = null
  switch (first.finish_reason) {
    case "stop":
      stop_reason = "stop"
      break
    case "tool_calls":
      stop_reason = "tool_call"
      break
    case "length":
      stop_reason = "max_output_tokens"
      break
    case "content_filter":
      stop_reason = "content_filter"
      break
  }
  const u = (resp as any).usage
  const usage = u
    ? {
        input_tokens: u.prompt_tokens,
        output_tokens: u.completion_tokens,
        total_tokens: u.total_tokens,
        ...(u.prompt_tokens_details?.cached_tokens
          ? { input_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
          : {}),
      }
    : undefined
  return {
    // Mirror the id back in Responses form; synthesize one when absent.
    id: (resp as any).id?.replace(/^chatcmpl_/, "resp_") ?? `resp_${Math.random().toString(36).slice(2)}`,
    object: "response",
    model: (resp as any).model,
    output,
    stop_reason,
    usage,
  }
}
/**
 * Converts one OpenAI Responses SSE frame ("event: ...\ndata: {...}") into a
 * chat.completion.chunk. Frames that don't match the expected two-line shape
 * or whose data line is not valid JSON are returned unchanged as strings so
 * the caller can pass them through.
 */
export function fromOpenaiChunk(chunk: string): CommonChunk | string {
  const lines = chunk.split("\n")
  const ev = lines[0]
  const dl = lines[1]
  if (!ev || !dl || !dl.startsWith("data: ")) return chunk
  let json: any
  try {
    json = JSON.parse(dl.slice(6))
  } catch {
    return chunk
  }
  const respObj = json.response ?? {}
  const out: CommonChunk = {
    id: respObj.id ?? json.id ?? "",
    object: "chat.completion.chunk",
    created: Math.floor(Date.now() / 1000),
    model: respObj.model ?? json.model ?? "",
    choices: [],
  }
  const e = ev.replace("event: ", "").trim()
  if (e === "response.output_text.delta") {
    const d = (json as any).delta ?? (json as any).text ?? (json as any).output_text_delta
    if (typeof d === "string" && d.length > 0)
      out.choices.push({ index: 0, delta: { content: d }, finish_reason: null })
  }
  if (e === "response.output_item.added" && (json as any).item?.type === "function_call") {
    const name = (json as any).item?.name
    // Prefer call_id over the output-item id: tool results reference the call
    // via call_id, so the chunk's tool-call id must match it for pairing.
    const id = (json as any).item?.call_id ?? (json as any).item?.id
    if (typeof name === "string" && name.length > 0) {
      out.choices.push({
        index: 0,
        delta: {
          tool_calls: [{ index: 0, id, type: "function", function: { name, arguments: "" } }],
        },
        finish_reason: null,
      })
    }
  }
  if (e === "response.function_call_arguments.delta") {
    const a = (json as any).delta ?? (json as any).arguments_delta
    if (typeof a === "string" && a.length > 0) {
      out.choices.push({
        index: 0,
        delta: { tool_calls: [{ index: 0, function: { arguments: a } }] },
        finish_reason: null,
      })
    }
  }
  if (e === "response.completed") {
    // Map the provider stop reason onto chat finish_reason vocabulary.
    const fr = (() => {
      const sr = (respObj as any).stop_reason ?? (json as any).stop_reason
      if (sr === "stop") return "stop"
      if (sr === "tool_call" || sr === "tool_calls") return "tool_calls"
      if (sr === "length" || sr === "max_output_tokens") return "length"
      if (sr === "content_filter") return "content_filter"
      return null
    })()
    out.choices.push({ index: 0, delta: {}, finish_reason: fr })
    const u = (respObj as any).usage ?? (json as any).response?.usage
    if (u) {
      out.usage = {
        prompt_tokens: u.input_tokens,
        completion_tokens: u.output_tokens,
        total_tokens: (u.input_tokens || 0) + (u.output_tokens || 0),
        ...(u.input_tokens_details?.cached_tokens
          ? { prompt_tokens_details: { cached_tokens: u.input_tokens_details.cached_tokens } }
          : {}),
      }
    }
  }
  return out
}
/**
 * Converts one chat.completion.chunk into a Responses-style SSE frame.
 * Chunks that carry nothing representable (no choices, empty delta and no
 * finish_reason) map to the empty string, which callers drop.
 */
export function toOpenaiChunk(chunk: CommonChunk): string {
  // Renders a single "event: ...\ndata: ..." frame.
  const emit = (event: string, data: any) => `event: ${event}\ndata: ${JSON.stringify(data)}`
  const choices = chunk.choices
  if (!choices || !Array.isArray(choices) || choices.length === 0) return ""
  const first = choices[0]
  const delta = first.delta
  if (!delta) return ""
  const id = chunk.id
  const model = chunk.model
  // Text deltas take priority over everything else in the chunk.
  if (delta.content) {
    return emit("response.output_text.delta", {
      id,
      type: "response.output_text.delta",
      delta: delta.content,
      response: { id, model },
    })
  }
  if (delta.tool_calls) {
    // Emit the first representable tool-call fragment: a new call (name
    // present) or an arguments continuation.
    for (const call of delta.tool_calls) {
      if (call.function?.name) {
        return emit("response.output_item.added", {
          type: "response.output_item.added",
          output_index: 0,
          item: {
            id: call.id,
            type: "function_call",
            name: call.function.name,
            call_id: call.id,
            arguments: "",
          },
        })
      }
      if (call.function?.arguments) {
        return emit("response.function_call_arguments.delta", {
          type: "response.function_call_arguments.delta",
          output_index: 0,
          delta: call.function.arguments,
        })
      }
    }
  }
  if (first.finish_reason) {
    const u = chunk.usage
    const usage = u
      ? {
          input_tokens: u.prompt_tokens,
          output_tokens: u.completion_tokens,
          total_tokens: u.total_tokens,
          ...(u.prompt_tokens_details?.cached_tokens
            ? { input_tokens_details: { cached_tokens: u.prompt_tokens_details.cached_tokens } }
            : {}),
        }
      : undefined
    return emit("response.completed", {
      id,
      type: "response.completed",
      response: { id, model, ...(usage ? { usage } : {}) },
    })
  }
  return ""
}

View File

@@ -0,0 +1,210 @@
import { ZenData } from "@opencode-ai/console-core/model.js"
import {
fromAnthropicChunk,
fromAnthropicRequest,
fromAnthropicResponse,
toAnthropicChunk,
toAnthropicRequest,
toAnthropicResponse,
} from "./anthropic"
import {
fromOpenaiChunk,
fromOpenaiRequest,
fromOpenaiResponse,
toOpenaiChunk,
toOpenaiRequest,
toOpenaiResponse,
} from "./openai"
import {
fromOaCompatibleChunk,
fromOaCompatibleRequest,
fromOaCompatibleResponse,
toOaCompatibleChunk,
toOaCompatibleRequest,
toOaCompatibleResponse,
} from "./openai-compatible"
/** Normalized token accounting shared by all provider formats. */
export type UsageInfo = {
  inputTokens: number
  outputTokens: number
  reasoningTokens?: number
  cacheReadTokens?: number
  // Cache-write buckets split by TTL (5 minutes / 1 hour).
  cacheWrite5mTokens?: number
  cacheWrite1hTokens?: number
}
/**
 * Factory describing how to talk to one upstream provider for a given model:
 * wire format plus URL/header/body rewrites, stream decoding, and usage
 * parsing/normalization.
 */
export type ProviderHelper = (input: { reqModel: string; providerModel: string }) => {
  format: ZenData.Format
  modifyUrl: (providerApi: string, isStream?: boolean) => string
  modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => void
  modifyBody: (body: Record<string, any>) => Record<string, any>
  // Optional decoder for providers whose stream is not plain text frames;
  // returning undefined from the factory means no decoding is needed.
  createBinaryStreamDecoder: () => ((chunk: Uint8Array) => Uint8Array | undefined) | undefined
  streamSeparator: string
  // Stateful parser: feed every stream chunk via parse(), then retrieve()
  // the provider-shaped usage object accumulated so far.
  createUsageParser: () => {
    parse: (chunk: string) => void
    retrieve: () => any
  }
  normalizeUsage: (usage: any) => UsageInfo
}
/** Chat-completions-style message used as the common interchange shape. */
export interface CommonMessage {
  role: "system" | "user" | "assistant" | "tool"
  content?: string | Array<CommonContentPart>
  // Set on role "tool": links the result to the originating tool call.
  tool_call_id?: string
  // Set on role "assistant" when the model requested tool invocations.
  tool_calls?: CommonToolCall[]
}
/** One element of an array-valued message content. */
export interface CommonContentPart {
  type: "text" | "image_url"
  text?: string
  image_url?: { url: string }
}
/** A single function-style tool invocation requested by the assistant. */
export interface CommonToolCall {
  id: string
  type: "function"
  function: {
    name: string
    // JSON-encoded argument object.
    arguments: string
  }
}
/** A function-style tool definition offered to the model. */
export interface CommonTool {
  type: "function"
  function: {
    name: string
    description?: string
    // JSON-schema parameter description.
    parameters?: Record<string, any>
  }
}
/**
 * Union of the usage fields the supported providers emit; both the
 * anthropic-style (input_tokens/output_tokens) and openai-style
 * (prompt_tokens/completion_tokens) spellings appear here.
 */
export interface CommonUsage {
  input_tokens?: number
  output_tokens?: number
  total_tokens?: number
  prompt_tokens?: number
  completion_tokens?: number
  cache_read_input_tokens?: number
  cache_creation?: {
    ephemeral_5m_input_tokens?: number
    ephemeral_1h_input_tokens?: number
  }
  input_tokens_details?: {
    cached_tokens?: number
  }
  output_tokens_details?: {
    reasoning_tokens?: number
  }
}
/** Format-neutral request that every wire format converts to/from. */
export interface CommonRequest {
  model: string
  max_tokens?: number
  temperature?: number
  top_p?: number
  // A single sequence or a list of sequences.
  stop?: string | string[]
  messages: CommonMessage[]
  stream?: boolean
  tools?: CommonTool[]
  tool_choice?: "auto" | "required" | { type: "function"; function: { name: string } }
}
/** Format-neutral non-streaming response (chat.completion shaped). */
export interface CommonResponse {
  id: string
  object: "chat.completion"
  created: number
  model: string
  choices: Array<{
    index: number
    message: {
      role: "assistant"
      content?: string
      tool_calls?: CommonToolCall[]
    }
    finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
  }>
  usage?: {
    prompt_tokens?: number
    completion_tokens?: number
    total_tokens?: number
    prompt_tokens_details?: { cached_tokens?: number }
  }
}
/** Format-neutral streaming delta (chat.completion.chunk shaped). */
export interface CommonChunk {
  id: string
  object: "chat.completion.chunk"
  created: number
  model: string
  choices: Array<{
    index: number
    delta: {
      role?: "assistant"
      content?: string
      // Incremental tool-call fragments: a fragment with a name opens a new
      // call; later fragments append to its arguments.
      tool_calls?: Array<{
        index: number
        id?: string
        type?: "function"
        function?: {
          name?: string
          arguments?: string
        }
      }>
    }
    finish_reason: "stop" | "tool_calls" | "length" | "content_filter" | null
  }>
  usage?: {
    prompt_tokens?: number
    completion_tokens?: number
    total_tokens?: number
    prompt_tokens_details?: { cached_tokens?: number }
  }
}
/**
 * Builds a request-body translator between two wire formats. Identity when
 * the formats already match; otherwise the body is normalized through the
 * common request shape and re-emitted in the target format.
 */
export function createBodyConverter(from: ZenData.Format, to: ZenData.Format) {
  return (body: any): any => {
    if (from === to) return body
    const raw: CommonRequest =
      from === "anthropic"
        ? fromAnthropicRequest(body)
        : from === "openai"
          ? fromOpenaiRequest(body)
          : fromOaCompatibleRequest(body)
    switch (to) {
      case "anthropic":
        return toAnthropicRequest(raw)
      case "openai":
        return toOpenaiRequest(raw)
      case "oa-compat":
        return toOaCompatibleRequest(raw)
    }
  }
}
/**
 * Builds a stream-chunk translator between two wire formats. Identity when
 * the formats match; unparseable chunks (returned as raw strings by the
 * source decoder) are forwarded unchanged.
 */
export function createStreamPartConverter(from: ZenData.Format, to: ZenData.Format) {
  return (part: any): any => {
    if (from === to) return part
    const raw: CommonChunk | string =
      from === "anthropic"
        ? fromAnthropicChunk(part)
        : from === "openai"
          ? fromOpenaiChunk(part)
          : fromOaCompatibleChunk(part)
    // String means the decoder could not parse the frame; pass it through.
    if (typeof raw === "string") return raw
    switch (to) {
      case "anthropic":
        return toAnthropicChunk(raw)
      case "openai":
        return toOpenaiChunk(raw)
      case "oa-compat":
        return toOaCompatibleChunk(raw)
    }
  }
}
/**
 * Builds a non-streaming response translator between two wire formats.
 * Identity when the formats already match.
 */
export function createResponseConverter(from: ZenData.Format, to: ZenData.Format) {
  return (response: any): any => {
    if (from === to) return response
    const raw: CommonResponse =
      from === "anthropic"
        ? fromAnthropicResponse(response)
        : from === "openai"
          ? fromOpenaiResponse(response)
          : fromOaCompatibleResponse(response)
    switch (to) {
      case "anthropic":
        return toAnthropicResponse(raw)
      case "openai":
        return toOpenaiResponse(raw)
      case "oa-compat":
        return toOaCompatibleResponse(raw)
    }
  }
}

View File

@@ -0,0 +1,54 @@
import { Database, eq, and, sql, inArray } from "@opencode-ai/console-core/drizzle/index.js"
import { IpRateLimitTable } from "@opencode-ai/console-core/schema/ip.sql.js"
import { RateLimitError } from "./error"
import { logger } from "./logger"
import { ZenData } from "@opencode-ai/console-core/model.js"
/**
 * Builds an IP-keyed rate limiter, or undefined when no limit is configured.
 *
 * `track()` increments the counter for the current interval bucket;
 * `check()` sums the relevant buckets and throws RateLimitError when the
 * configured limit is reached.
 */
export function createRateLimiter(limit: ZenData.RateLimit | undefined, rawIp: string, headers: Headers) {
  if (!limit) return
  // When a gating header is configured but absent from the request, the
  // fallback value applies instead of the normal limit.
  const limitValue = limit.checkHeader && !headers.get(limit.checkHeader) ? limit.fallbackValue! : limit.value
  const ip = !rawIp.length ? "unknown" : rawIp
  const now = Date.now()
  // Day limits use a single YYYYMMDD bucket; otherwise usage is summed over
  // the current and previous two hourly buckets — presumably a rolling ~3h
  // window; confirm against ZenData.RateLimit.period semantics.
  const intervals =
    limit.period === "day"
      ? [buildYYYYMMDD(now)]
      : [buildYYYYMMDDHH(now), buildYYYYMMDDHH(now - 3_600_000), buildYYYYMMDDHH(now - 7_200_000)]
  return {
    // Upsert: the first hit in an interval inserts count=1, later hits
    // increment atomically in SQL.
    track: async () => {
      await Database.use((tx) =>
        tx
          .insert(IpRateLimitTable)
          .values({ ip, interval: intervals[0], count: 1 })
          .onDuplicateKeyUpdate({ set: { count: sql`${IpRateLimitTable.count} + 1` } }),
      )
    },
    // Throws RateLimitError when accumulated count reaches the limit.
    check: async () => {
      const rows = await Database.use((tx) =>
        tx
          .select({ count: IpRateLimitTable.count })
          .from(IpRateLimitTable)
          .where(and(eq(IpRateLimitTable.ip, ip), inArray(IpRateLimitTable.interval, intervals))),
      )
      const total = rows.reduce((sum, r) => sum + r.count, 0)
      logger.debug(`rate limit total: ${total}`)
      if (total >= limitValue) throw new RateLimitError(`Rate limit exceeded. Please try again later.`)
    },
  }
}
// Formats a UTC timestamp (ms) as a YYYYMMDD day-bucket key.
function buildYYYYMMDD(timestamp: number) {
  const iso = new Date(timestamp).toISOString()
  return iso.slice(0, 10).split("-").join("")
}
// Formats a UTC timestamp (ms) as a YYYYMMDDHH hour-bucket key.
function buildYYYYMMDDHH(timestamp: number) {
  const iso = new Date(timestamp).toISOString()
  return iso.slice(0, 13).replace(/[-T]/g, "")
}

View File

@@ -0,0 +1,16 @@
import { Resource } from "@opencode-ai/console-resource"
/**
 * Builds a KV-backed tracker that pins a session to a provider, or undefined
 * when sticky routing is off or there is no session to key on.
 */
export function createStickyTracker(stickyProvider: "strict" | "prefer" | undefined, session: string) {
  if (!stickyProvider || !session) return
  const key = `sticky:${session}`
  return {
    // Provider id this session was last routed to, if any.
    get: async () => Resource.GatewayKv.get(key),
    // Remember the chosen provider for 24 hours.
    set: async (providerId: string) => {
      await Resource.GatewayKv.put(key, providerId, { expirationTtl: 86400 })
    },
  }
}

View File

@@ -0,0 +1,49 @@
import { Database, eq, sql } from "@opencode-ai/console-core/drizzle/index.js"
import { IpTable } from "@opencode-ai/console-core/schema/ip.sql.js"
import { UsageInfo } from "./provider/provider"
import { ZenData } from "@opencode-ai/console-core/model.js"
/**
 * Builds a per-IP trial usage limiter, or undefined when trials don't apply
 * (no trial config, no IP, or no limit for this client).
 *
 * NOTE(review): `track()` relies on the `_isTrial` flag cached by a prior
 * `isTrial()` call; if track() runs first the flag is undefined and the
 * update is skipped. Confirm call sites always check isTrial() first.
 */
export function createTrialLimiter(trial: ZenData.Trial | undefined, ip: string, client: string) {
  if (!trial) return
  if (!ip) return
  // A client-specific limit wins; a limit with no client acts as the default.
  const limit =
    trial.limits.find((limit) => limit.client === client)?.limit ??
    trial.limits.find((limit) => limit.client === undefined)?.limit
  if (!limit) return
  // Cached by isTrial() and reused by track() for the same request.
  let _isTrial: boolean
  return {
    // True while this IP's accumulated usage is still under the trial limit.
    isTrial: async () => {
      const data = await Database.use((tx) =>
        tx
          .select({
            usage: IpTable.usage,
          })
          .from(IpTable)
          .where(eq(IpTable.ip, ip))
          .then((rows) => rows[0]),
      )
      _isTrial = (data?.usage ?? 0) < limit
      return _isTrial
    },
    // Adds this request's total token usage (all buckets) to the IP row;
    // no-op for non-trial traffic.
    track: async (usageInfo: UsageInfo) => {
      if (!_isTrial) return
      const usage =
        usageInfo.inputTokens +
        usageInfo.outputTokens +
        (usageInfo.reasoningTokens ?? 0) +
        (usageInfo.cacheReadTokens ?? 0) +
        (usageInfo.cacheWrite5mTokens ?? 0) +
        (usageInfo.cacheWrite1hTokens ?? 0)
      await Database.use((tx) =>
        tx
          .insert(IpTable)
          .values({ ip, usage })
          .onDuplicateKeyUpdate({ set: { usage: sql`${IpTable.usage} + ${usage}` } }),
      )
    },
  }
}

View File

@@ -0,0 +1,11 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/util/handler"
/**
 * OpenAI-compatible chat-completions endpoint; delegates to the shared zen
 * handler with oa-compat parsing rules.
 */
export function POST(input: APIEvent) {
  return handler(input, {
    format: "oa-compat",
    // The API key arrives as a bearer token: "Authorization: Bearer <key>".
    parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
    // Model and streaming flag both live in the JSON body.
    parseModel: (_url: string, body: any) => body.model,
    parseIsStream: (_url: string, body: any) => Boolean(body.stream),
  })
}

View File

@@ -0,0 +1,11 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/util/handler"
/**
 * Anthropic-format endpoint; delegates to the shared zen handler with
 * anthropic parsing rules.
 */
export function POST(input: APIEvent) {
  return handler(input, {
    format: "anthropic",
    // Anthropic clients send the key in the x-api-key header.
    parseApiKey: (headers: Headers) => headers.get("x-api-key") ?? undefined,
    // Model and streaming flag both live in the JSON body.
    parseModel: (_url: string, body: any) => body.model,
    parseIsStream: (_url: string, body: any) => Boolean(body.stream),
  })
}

View File

@@ -0,0 +1,60 @@
import type { APIEvent } from "@solidjs/start/server"
import { and, Database, eq, isNull } from "@opencode-ai/console-core/drizzle/index.js"
import { KeyTable } from "@opencode-ai/console-core/schema/key.sql.js"
import { WorkspaceTable } from "@opencode-ai/console-core/schema/workspace.sql.js"
import { ModelTable } from "@opencode-ai/console-core/schema/model.sql.js"
import { ZenData } from "@opencode-ai/console-core/model.js"
/** CORS preflight: allow any origin to call the models endpoint. */
export async function OPTIONS(input: APIEvent) {
  const corsHeaders = {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
    "Access-Control-Allow-Headers": "Content-Type, Authorization",
  }
  return new Response(null, { status: 200, headers: corsHeaders })
}
/**
 * Lists the zen models in OpenAI `/v1/models` format. When a valid API key
 * is supplied, models disabled for the key's workspace are filtered out;
 * unauthenticated requests see the full list.
 */
export async function GET(input: APIEvent) {
  const zenData = ZenData.list()
  const disabledModels = await authenticate()
  return new Response(
    JSON.stringify({
      object: "list",
      data: Object.entries(zenData.models)
        .filter(([id]) => !disabledModels.includes(id))
        .map(([id, _model]) => ({
          id,
          object: "model",
          created: Math.floor(Date.now() / 1000),
          owned_by: "opencode",
        })),
    }),
    {
      headers: {
        "Content-Type": "application/json",
      },
    },
  )
  // Resolves the bearer key to the workspace's disabled-model list; an
  // absent key yields an empty list (nothing filtered).
  async function authenticate() {
    const apiKey = input.request.headers.get("authorization")?.split(" ")[1]
    if (!apiKey) return []
    // Left join keeps keys whose workspace has no disabled models; those
    // rows carry a null model value, which never matches a model id above.
    const disabledModels = await Database.use((tx) =>
      tx
        .select({
          model: ModelTable.model,
        })
        .from(KeyTable)
        .innerJoin(WorkspaceTable, eq(WorkspaceTable.id, KeyTable.workspaceID))
        .leftJoin(ModelTable, and(eq(ModelTable.workspaceID, KeyTable.workspaceID), isNull(ModelTable.timeDeleted)))
        .where(and(eq(KeyTable.key, apiKey), isNull(KeyTable.timeDeleted)))
        .then((rows) => rows.map((row) => row.model)),
    )
    return disabledModels
  }
}

View File

@@ -0,0 +1,13 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/util/handler"
/**
 * Google (Gemini) format endpoint; delegates to the shared zen handler.
 * Model and verb are encoded in the final path segment, e.g.
 * https://opencode.ai/zen/v1/models/gemini-3-pro:streamGenerateContent?alt=sse
 */
export function POST(input: APIEvent) {
  const lastSegment = (url: string) => url.split("/").pop() ?? ""
  return handler(input, {
    format: "google",
    // Google clients send the key in the x-goog-api-key header.
    parseApiKey: (headers: Headers) => headers.get("x-goog-api-key") ?? undefined,
    // Model name is the path segment before the ":<verb>" suffix.
    parseModel: (url: string, _body: any) => lastSegment(url).split(":")[0] ?? "",
    // Streaming is signalled by the :streamGenerateContent verb.
    parseIsStream: (url: string, _body: any) =>
      lastSegment(url).split(":")[1]?.startsWith("streamGenerateContent") ?? false,
  })
}

View File

@@ -0,0 +1,11 @@
import type { APIEvent } from "@solidjs/start/server"
import { handler } from "~/routes/zen/util/handler"
/**
 * OpenAI (Responses) format endpoint; delegates to the shared zen handler
 * with openai parsing rules.
 */
export function POST(input: APIEvent) {
  return handler(input, {
    format: "openai",
    // The API key arrives as a bearer token: "Authorization: Bearer <key>".
    parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
    // Model and streaming flag both live in the JSON body.
    parseModel: (_url: string, body: any) => body.model,
    parseIsStream: (_url: string, body: any) => Boolean(body.stream),
  })
}