diff --git a/infra/alerts.ts b/infra/alerts.ts new file mode 100644 index 00000000..3e4dcbd0 --- /dev/null +++ b/infra/alerts.ts @@ -0,0 +1,10 @@ +const alerts = new sst.aws.SnsTopic("Alerts"); + +new aws.sns.TopicSubscription("AlertsSubscription", { + topic: alerts.arn, + protocol: "email", + endpoint: + "alert-sst-aaaanfxph6mglwqxacgpdhpbrq@anomaly-innovations.slack.com", +}); + +export {}; diff --git a/infra/api.ts b/infra/api.ts new file mode 100644 index 00000000..23c410a5 --- /dev/null +++ b/infra/api.ts @@ -0,0 +1,48 @@ +import { auth } from "./auth"; +import { autodeploy } from "./autodeploy"; +import { bus } from "./bus"; +import { domain } from "./dns"; +import { email } from "./email"; +import { database } from "./planetscale"; +import { allSecrets, secret } from "./secret"; +import { storage } from "./storage"; +import { websocket } from "./websocket"; + +const api = new sst.aws.Function("Api", { + handler: "packages/functions/src/api/api.handler", + timeout: "2 minutes", + permissions: [{ actions: ["sts:*", "iot:*"], resources: ["*"] }], + link: [ + storage, + auth, + database, + bus, + email, + autodeploy, + websocket, + ...allSecrets, + ], + nodejs: { + install: ["source-map"], + }, + url: true, +}); + +const error = new sst.aws.Function("Error", { + handler: "packages/functions/src/error.handler", + url: true, + dev: false, + live: false, +}); + +export const apiRouter = new sst.aws.Router("ApiRouter", { + routes: { + "/*": api.url, + }, + domain: { + name: "api." 
+ domain, + dns: sst.aws.dns({ + override: true, + }), + }, +}); diff --git a/infra/auth.ts b/infra/auth.ts new file mode 100644 index 00000000..4f41a760 --- /dev/null +++ b/infra/auth.ts @@ -0,0 +1,34 @@ +import { domain } from "./dns"; +import { email } from "./email"; +import { database } from "./planetscale"; +import { secret } from "./secret"; + +export const auth = new sst.aws.Auth("Auth", { + authenticator: { + handler: "packages/functions/src/auth.handler", + link: [ + email, + secret.SlackClientID, + secret.SlackClientSecret, + secret.BotpoisonSecretKey, + database, + ], + permissions: [{ actions: ["ses:*"], resources: ["*"] }], + url: true, + environment: { + AUTH_FRONTEND_URL: $dev ? "http://localhost:3000" : "https://" + domain, + }, + }, +}); + +export const authRouter = new sst.aws.Router("AuthRouter", { + routes: { + "/*": auth.url, + }, + domain: { + name: "auth." + domain, + dns: sst.aws.dns({ + override: true, + }), + }, +}); diff --git a/infra/autodeploy.ts b/infra/autodeploy.ts new file mode 100644 index 00000000..ba567cf2 --- /dev/null +++ b/infra/autodeploy.ts @@ -0,0 +1,159 @@ +import fs from "fs"; +import { createHash } from "crypto"; +import { storage } from "./storage"; +import { database } from "./planetscale"; +import { secret } from "./secret"; +import { bus } from "./bus"; +import { websocket } from "./websocket"; + +const { bucket, version } = createBuildScript(); +const repo = createEcrRepo(); +const monitor = createBuildTimeoutMonitor(); +const remover = createRunnerRemover(); +const parser = createConfigParser(); + +export const autodeploy = new sst.Linkable("AutodeployConfig", { + properties: { + buildImage: repo.repositoryUri, + buildspecBucketName: bucket, + buildspecVersion: version, + timeoutMonitorScheduleGroupName: monitor.scheduleGroup.name, + timeoutMonitorScheduleRoleArn: monitor.role.arn, + timeoutMonitorFunctionArn: monitor.handler.arn, + runnerRemoverScheduleGroupName: 
remover.scheduleGroup.name, + runnerRemoverScheduleRoleArn: remover.role.arn, + runnerRemoverFunctionArn: remover.handler.arn, + configParserFunctionArn: parser.arn, + }, + include: [ + sst.aws.permission({ + actions: ["scheduler:CreateSchedule"], + resources: ["*"], + }), + sst.aws.permission({ + actions: ["lambda:InvokeFunction"], + resources: [parser.arn], + }), + ], +}); + +function createBuildScript() { + const bucket = storage.name; + const content = fs.readFileSync("packages/build/buildspec/index.mjs", "utf8"); + const version = createHash("sha256").update(content).digest("hex"); + new aws.s3.BucketObjectv2("AutodeployBuildspec", { + bucket, + key: `buildspec/${version}/index.mjs`, + acl: "public-read", + content, + }); + return { bucket, version }; +} + +function createEcrRepo() { + const repo = new aws.ecrpublic.Repository("AutodeployRepository", { + repositoryName: `${$app.name}-${$app.stage}-images`, + forceDestroy: true, + }); + new aws.ecrpublic.RepositoryPolicy("AutodeployRepositoryPolicy", { + repositoryName: repo.repositoryName, + policy: aws.iam.getPolicyDocumentOutput({ + statements: [ + { + sid: "AllowPull", + principals: [ + { + type: "*", + identifiers: ["*"], + }, + ], + actions: [ + "ecr-public:BatchCheckLayerAvailability", + "ecr-public:DescribeImages", + "ecr-public:DescribeRepositories", + ], + }, + ], + }).json, + }); + + return repo; +} + +function createBuildTimeoutMonitor() { + const scheduleGroup = new aws.scheduler.ScheduleGroup( + "AutodeployTimeoutMonitorScheduleGroup", + { name: `${$app.name}-${$app.stage}-run-timeout-monitor` }, + ); + const handler = new sst.aws.Function("AutodeployTimeoutMonitor", { + handler: "packages/functions/src/run/monitor.handler", + link: [ + database, + bus, + websocket, + secret.GithubAppID, + secret.GithubPrivateKey, + ], + permissions: [{ actions: ["sts:*", "iot:*"], resources: ["*"] }], + }); + const role = new aws.iam.Role("AutodeployTimeoutMonitorRole", { + assumeRolePolicy: 
aws.iam.getPolicyDocumentOutput({ + statements: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + type: "Service", + identifiers: ["scheduler.amazonaws.com"], + }, + ], + }, + ], + }).json, + inlinePolicies: [ + { + policy: aws.iam.getPolicyDocumentOutput({ + statements: [ + { + actions: ["lambda:InvokeFunction"], + resources: [handler.arn], + }, + ], + }).json, + }, + ], + }); + return { scheduleGroup, handler, role }; +} + +function createRunnerRemover() { + const scheduleGroup = new aws.scheduler.ScheduleGroup( + "AutodeployRunnerRemoverScheduleGroup", + { name: `${$app.name}-${$app.stage}-runner-remover` }, + ); + const handler = new sst.aws.Function("AutodeployRunnerRemover", { + handler: "packages/functions/src/run/runner-remover.handler", + link: [database, websocket], + environment: { + RUNNER_REMOVER_SCHEDULE_GROUP_NAME: scheduleGroup.name!, + RUNNER_REMOVER_SCHEDULE_ROLE_ARN: monitor.role.arn, + }, + permissions: [ + { + actions: ["sts:*", "iot:*", "scheduler:CreateSchedule", "iam:PassRole"], + resources: ["*"], + }, + ], + }); + return { scheduleGroup, handler, role: monitor.role }; +} + +function createConfigParser() { + return new sst.aws.Function("AutodeployConfigParser", { + handler: "packages/functions/src/run/config-parser.handler", + timeout: "1 minute", + nodejs: { + install: ["esbuild", "@esbuild/linux-arm64"], + }, + }); +} diff --git a/infra/billing.ts b/infra/billing.ts new file mode 100644 index 00000000..d71a2b7e --- /dev/null +++ b/infra/billing.ts @@ -0,0 +1,31 @@ +import { bus } from "./bus"; +import { database } from "./planetscale"; +import { assumable } from "./secret"; + +const queue = new sst.aws.Queue("BillingQueue", { + fifo: true, + visibilityTimeout: "100 seconds", +}); + +queue.subscribe( + { + link: [database], + handler: "packages/functions/src/events/fetch-usage.handler", + permissions: [assumable], + timeout: "3 minutes", + }, + { + batch: { + size: 10, + }, + }, +); + +new sst.aws.Cron("BillingCron", { + schedule: 
"cron(0 5 * * ? *)", + job: { + handler: "packages/functions/src/billing/cron.handler", + permissions: [assumable], + link: [bus, database, queue], + }, +}); diff --git a/infra/bus.ts b/infra/bus.ts new file mode 100644 index 00000000..d770ce55 --- /dev/null +++ b/infra/bus.ts @@ -0,0 +1,20 @@ +export const bus = new sst.aws.Bus("Bus"); + +new aws.cloudwatch.EventBusPolicy("BusPolicy", { + eventBusName: bus.name, + policy: $jsonStringify({ + Version: "2012-10-17", + Statement: [ + { + Sid: "AllowEvents", + Effect: "Allow", + Principal: { + AWS: "*", + Service: "s3.amazonaws.com", + }, + Action: "events:PutEvents", + Resource: bus.nodes.bus.arn, + }, + ], + }), +}); diff --git a/infra/connect.ts b/infra/connect.ts new file mode 100644 index 00000000..d4e51237 --- /dev/null +++ b/infra/connect.ts @@ -0,0 +1,106 @@ +import { bus } from "./bus"; +import { database } from "./planetscale"; +import { storage } from "./storage"; +import { websocket } from "./websocket"; + +export const identity = aws.getCallerIdentityOutput(); +const connect = new sst.aws.Function("Connect", { + handler: "packages/functions/src/connect.handler", + permissions: [{ actions: ["sts:*", "iot:*"], resources: ["*"] }], + link: [bus, database, websocket], +}); + +new aws.lambda.Permission("ConnectInvoke", { + action: "lambda:InvokeFunction", + principal: "*", + statementId: "AllowCustomers", + function: connect.name, +}); + +new aws.s3.BucketObjectv2("ConnectTemplateFile", { + bucket: storage.name, + key: "connect/template.json", + acl: "public-read", + content: $jsonStringify({ + AWSTemplateFormatVersion: "2010-09-09", + Description: "Connect your AWS account to access the SST Console.", + Parameters: { + workspaceID: { + Type: "String", + Description: + "This is the ID of your SST Console workspace, do not edit.", + }, + }, + Outputs: {}, + Resources: { + SSTRole: { + Type: "AWS::IAM::Role", + Properties: { + RoleName: { + "Fn::Join": [ + "-", + [ + "sst", + { + Ref: "workspaceID", + }, + ], + 
], + }, + AssumeRolePolicyDocument: { + Version: "2012-10-17", + Statement: [ + { + Effect: "Allow", + Principal: { + AWS: identity.accountId, + }, + Action: "sts:AssumeRole", + Condition: { + StringEquals: { + "sts:ExternalId": { + Ref: "workspaceID", + }, + }, + }, + }, + ], + }, + ManagedPolicyArns: ["arn:aws:iam::aws:policy/AdministratorAccess"], + }, + }, + SSTConnect: { + Type: "Custom::SSTConnect", + Properties: { + ServiceToken: connect.nodes.function.arn, + accountID: { + Ref: "AWS::AccountId", + }, + region: { + Ref: "AWS::Region", + }, + role: { + "Fn::GetAtt": ["SSTRole", "Arn"], + }, + workspaceID: { + Ref: "workspaceID", + }, + }, + }, + }, + Rules: { + testRegion: { + Assertions: [ + { + Assert: { + "Fn::Equals": [{ Ref: "AWS::Region" }, "us-east-1"], + }, + AssertDescription: "This stack needs to be deployed to us-east-1", + }, + ], + }, + }, + }), +}); + +export const connectTemplateUrl = $interpolate`https://${storage.nodes.bucket.bucketRegionalDomainName}/connect/template.json`; diff --git a/infra/dns.ts b/infra/dns.ts new file mode 100644 index 00000000..2d5b8c72 --- /dev/null +++ b/infra/dns.ts @@ -0,0 +1,39 @@ +const PRODUCTION = "console.sst.dev"; +const DEV = "dev.console.sst.dev"; + +export const { zone, domain } = (() => { + if ($app.stage === "production") + return { + zone: new aws.route53.Zone( + "Zone", + { + name: PRODUCTION, + }, + { + retainOnDelete: true, + import: "Z001790632MKQEXQUOINJ", + }, + ), + domain: PRODUCTION, + }; + + if ($app.stage === "dev") + return { + zone: new aws.route53.Zone( + "Zone", + { + name: DEV, + }, + { + import: "Z04733193GHYW3SIO6DKT", + ignoreChanges: ["*"], + }, + ), + domain: DEV, + }; + + return { + zone: aws.route53.Zone.get("Zone", "Z04733193GHYW3SIO6DKT"), + domain: `${$app.stage}.${DEV}`, + }; +})(); diff --git a/infra/email.ts b/infra/email.ts new file mode 100644 index 00000000..809c5d99 --- /dev/null +++ b/infra/email.ts @@ -0,0 +1,28 @@ +import { domain, zone } from "./dns"; + +export const 
email = new sst.aws.Email("Email", { + sender: domain, + dns: sst.aws.dns({ + override: true, + }), +}); + +// export const email = new sst.Linkable("Email", { +// properties: { +// sender: domain, +// }, +// }); + +// new aws.route53.Record("MX", { +// name: domain, +// zoneId: zone.zoneId, +// type: "MX", +// ttl: 60, +// records: [ +// "aspmx.l.google.com.", +// "alt1.aspmx.l.google.com.", +// "alt2.aspmx.l.google.com.", +// "alt3.aspmx.l.google.com.", +// "alt4.aspmx.l.google.com.", +// ], +// }); diff --git a/infra/event.ts b/infra/event.ts new file mode 100644 index 00000000..24bdf0b1 --- /dev/null +++ b/infra/event.ts @@ -0,0 +1,85 @@ +import { autodeploy } from "./autodeploy"; +import { bus } from "./bus"; +import { email } from "./email"; +import { issues } from "./issues"; +import { database } from "./planetscale"; +import { secret } from "./secret"; +import { websocket } from "./websocket"; + +bus.subscribe( + "EventSubscriber", + { + handler: "packages/functions/src/event.handler", + permissions: [ + { + actions: ["sts:*", "logs:*", "ses:*", "iot:*", "s3:*"], + resources: ["*"], + }, + { + actions: ["iam:PassRole"], + resources: [ + issues.properties.role, + autodeploy.properties.timeoutMonitorScheduleRoleArn, + ], + }, + ], + link: [ + database, + bus, + issues, + email, + autodeploy, + secret.GithubAppID, + secret.GithubPrivateKey, + websocket, + ], + timeout: "5 minute", + }, + { + pattern: { + source: [`console.${$app.stage}`], + }, + } +); + +bus.subscribe( + "StackUpdatedSubscriber", + { + handler: "packages/functions/src/events/stack-updated-external.handler", + link: [bus, database, websocket], + }, + { + pattern: { + source: ["aws.s3"], + }, + } +); + +bus.subscribe( + "RunnerUpdatedSubscriber", + { + handler: "packages/functions/src/events/runner-updated-external.handler", + link: [bus, database, websocket], + permissions: [{ actions: ["iot:*"], resources: ["*"] }], + }, + { + pattern: { + source: ["sst.runner"], + }, + } +); + +bus.subscribe( 
+ "RunnerCodeBuildSubscriber", + { + handler: + "packages/functions/src/events/runner-updated-external.codebuildHandler", + link: [bus, database, websocket], + permissions: [{ actions: ["iot:*"], resources: ["*"] }], + }, + { + pattern: { + source: ["aws.codebuild"], + }, + } +); diff --git a/infra/issues.ts b/infra/issues.ts new file mode 100644 index 00000000..72d75740 --- /dev/null +++ b/infra/issues.ts @@ -0,0 +1,79 @@ +import { bus } from "./bus"; +import { identity } from "./connect"; +import { database } from "./planetscale"; +import { storage } from "./storage"; + +const stream = new sst.aws.KinesisStream("IssueStream"); + +stream.subscribe( + { + handler: "packages/functions/src/issues/subscriber.handler", + timeout: "15 minutes", + permissions: [{ actions: ["sts:*", "logs:*"], resources: ["*"] }], + nodejs: { + install: ["source-map"], + }, + link: [bus, storage, database], + }, + { + transform: { + eventSourceMapping: { + bisectBatchOnFunctionError: true, + startingPosition: "TRIM_HORIZON", + parallelizationFactor: 10, + }, + }, + }, +); + +const regions = aws.getRegionsOutput(); + +const role = new aws.iam.Role("IssueRole", { + assumeRolePolicy: aws.iam.getPolicyDocumentOutput({ + statements: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: regions.names.apply((regions) => + regions.map((r) => `logs.${r}.amazonaws.com`), + ), + type: "Service", + }, + ], + }, + ], + }).json, +}); + +new aws.iam.RolePolicy("IssuePolicy", { + role: role.name, + policy: aws.iam.getPolicyDocumentOutput({ + statements: [ + { + actions: ["kinesis:PutRecord"], + resources: [stream.arn], + }, + ], + }).json, +}); + +export const issues = new sst.Linkable("IssueDestination", { + properties: { + role: role.arn, + prefix: $interpolate`arn:aws:logs::${identity.accountId}:destination:`, + stream: stream.arn, + }, +}); + +new sst.aws.Cron("IssueCleanup", { + schedule: "cron(0 4 * * ? 
*)", + job: { + handler: "packages/functions/src/issues/cleanup.handler", + timeout: "15 minutes", + link: [database], + environment: { + DRIZZLE_LOG: "true", + }, + }, +}); diff --git a/infra/planetscale.ts b/infra/planetscale.ts new file mode 100644 index 00000000..8ab40d4e --- /dev/null +++ b/infra/planetscale.ts @@ -0,0 +1,30 @@ +const mysql = planetscale.Database.get("Database", "sst,sst"); + +const branch = + $app.stage === "production" + ? planetscale.Branch.get("DatabaseBranch", "sst,sst,production") + : new planetscale.Branch("DatabaseBranch", { + database: mysql.name, + organization: mysql.organization, + name: $app.stage, + parentBranch: "production", + production: $app.stage === "production", + }); + +const password = new planetscale.Password("DatabasePassword", { + database: mysql.name, + organization: mysql.organization, + branch: branch.name, + role: "admin", + name: `${$app.name}-${$app.stage}-password`, +}); + +export const database = new sst.Linkable("Database", { + properties: { + username: password.username, + host: branch.mysqlAddress, + password: password.plaintext, + database: password.database, + port: 3306, + }, +}); diff --git a/infra/secret.ts b/infra/secret.ts new file mode 100644 index 00000000..7851ec35 --- /dev/null +++ b/infra/secret.ts @@ -0,0 +1,12 @@ +export const secret = { + SlackClientID: new sst.Secret("SlackClientID"), + SlackClientSecret: new sst.Secret("SlackClientSecret"), + GithubAppID: new sst.Secret("GithubAppID"), + GithubPrivateKey: new sst.Secret("GithubPrivateKey"), + GithubWebhookSecret: new sst.Secret("GithubWebhookSecret"), + BotpoisonSecretKey: new sst.Secret("BotpoisonSecretKey"), +}; + +export const allSecrets = [...Object.values(secret)]; + +export const assumable = { actions: ["sts:*"], resources: ["*"] }; diff --git a/infra/storage.ts b/infra/storage.ts new file mode 100644 index 00000000..aa1f0d8d --- /dev/null +++ b/infra/storage.ts @@ -0,0 +1,64 @@ +export const storage = new sst.aws.Bucket("Storage", { 
+ transform: { + publicAccessBlock: { + blockPublicAcls: false, + blockPublicPolicy: false, + ignorePublicAcls: false, + restrictPublicBuckets: false, + }, + }, +}); + +new aws.s3.BucketOwnershipControls("ownership-controls", { + bucket: storage.name, + rule: { + objectOwnership: "ObjectWriter", + }, +}); + +// export const storageAccess = new aws.s3.BucketPublicAccessBlock( +// "StorageAccess", +// { +// bucket: storage.name, +// blockPublicAcls: false, +// blockPublicPolicy: false, +// ignorePublicAcls: false, +// restrictPublicBuckets: false, +// }, +// ); + +new aws.s3.BucketLifecycleConfigurationV2("StorageLifecycle", { + bucket: storage.name, + rules: [ + { + id: "daily", + status: "Enabled", + filter: { + prefix: "temporary/daily/", + }, + expiration: { + days: 1, + }, + }, + { + id: "weekly", + status: "Enabled", + filter: { + prefix: "temporary/weekly/", + }, + expiration: { + days: 7, + }, + }, + { + id: "monthly", + status: "Enabled", + filter: { + prefix: "temporary/monthly/", + }, + expiration: { + days: 30, + }, + }, + ], +}); diff --git a/infra/util.ts b/infra/util.ts new file mode 100644 index 00000000..db647bca --- /dev/null +++ b/infra/util.ts @@ -0,0 +1 @@ +export const ALL_REGIONS = aws.getRegionsOutput().names; diff --git a/infra/web.ts b/infra/web.ts new file mode 100644 index 00000000..90539989 --- /dev/null +++ b/infra/web.ts @@ -0,0 +1,27 @@ +import { apiRouter } from "./api"; +import { authRouter } from "./auth"; +import { connectTemplateUrl } from "./connect"; +import { domain } from "./dns"; +import { websocket } from "./websocket"; + +new sst.aws.StaticSite("Workspace", { + path: "./packages/web/workspace", + build: { + output: "./dist", + command: "pnpm build", + }, + domain: { + name: domain, + dns: sst.aws.dns({ + override: true, + }), + }, + environment: { + VITE_API_URL: apiRouter.url, + VITE_AUTH_URL: authRouter.url, + VITE_STAGE: $app.stage, + VITE_CONNECT_URL: connectTemplateUrl, + VITE_WEBSOCKET_HTTP: websocket.properties.http,
+ VITE_WEBSOCKET_REALTIME: websocket.properties.realtime, + }, +}); diff --git a/infra/websocket.ts b/infra/websocket.ts new file mode 100644 index 00000000..cc43e46a --- /dev/null +++ b/infra/websocket.ts @@ -0,0 +1,25 @@ +import { auth } from "./auth"; +import { database } from "./planetscale"; + +const websocketAuthorizer = new sst.aws.Function("WebsocketAuthorizer", { + handler: "packages/functions/src/auth-websocket.handler", + link: [database, auth], +}); + +export const websocket = new sst.Linkable("Websocket", { + properties: + $app.stage === "production" + ? { + http: "il74c3crpfbydaoqjni56kv6ky.appsync-api.us-east-1.amazonaws.com", + realtime: + "il74c3crpfbydaoqjni56kv6ky.appsync-realtime-api.us-east-1.amazonaws.com", + token: new sst.Secret("WebsocketToken").value, + } + : { + http: "oyq6tqbrczd5xfovlyvcsd3xtu.appsync-api.us-east-1.amazonaws.com", + realtime: + "oyq6tqbrczd5xfovlyvcsd3xtu.appsync-realtime-api.us-east-1.amazonaws.com", + token: new sst.Secret("WebsocketToken").value, + }, +}); +export {}; diff --git a/package.json b/package.json index 76f78eb5..ccea6465 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,10 @@ "version": "0.0.0", "private": true, "type": "module", + "engines": { + "node": ">=20.0.0", + "pnpm": "9.12.x" + }, "workspaces": [ "packages/*", "packages/web/*" @@ -19,12 +23,18 @@ "typecheck": "turbo typecheck" }, "devDependencies": { - "@aws-cdk/aws-kinesisfirehose-alpha": "2.142.1-alpha.0", "@tsconfig/node16": "^16.1.0", - "aws-cdk-lib": "2.142.1", "constructs": "10.3.0", - "sst": "2.43.4", + "sst": "3.3.2", "turbo": "^1.10.13", "typescript": "^5.2.2" + }, + "dependencies": { + "@tsconfig/node20": "^20.1.4" + }, + "pnpm": { + "patchedDependencies": { + "@macaron-css/solid": "patches/@macaron-css__solid.patch" + } } } diff --git a/packages/build/buildspec/index.mjs b/packages/build/buildspec/index.mjs index 4bfe65f4..bf5c1d94 100644 --- a/packages/build/buildspec/index.mjs +++ b/packages/build/buildspec/index.mjs @@ -129,16 
+129,16 @@ export async function handler(event, context) { } async function installSst() { - process.chdir(APP_PATH); - - // SST installed locally - if (fs.existsSync("node_modules/.bin/sst")) return; + // Check if SST is installed locally + if (findLocalSstBinary()) return; + // Install SST globally const { stage } = event; const semverPattern = sstConfig.app({ stage }).version; - console.log("Required SST version:", semverPattern ?? "Latest"); + console.log("Installing SST globally, version:", semverPattern ?? "Latest"); - shell(`npm -g install sst@${semverPattern ?? "ion"}`); + shell(`npm -g install sst@${semverPattern ?? "latest"}`); + return "sst"; } async function runWorkflow() { @@ -187,10 +187,8 @@ export async function handler(event, context) { process.chdir(APP_PATH); const { stage, credentials, runID } = event; - const binary = fs.existsSync("node_modules/.bin/sst") - ? "node_modules/.bin/sst" - : "sst"; - shell(`${binary} deploy --stage ${stage}`, { + const sstPath = findLocalSstBinary() ?? "sst"; + shell(`${sstPath} deploy --stage ${stage}`, { env: { AWS_ACCESS_KEY_ID: credentials.accessKeyId, AWS_SECRET_ACCESS_KEY: credentials.secretAccessKey, @@ -205,10 +203,8 @@ export async function handler(event, context) { process.chdir(APP_PATH); const { stage, credentials, runID } = event; - const binary = fs.existsSync("node_modules/.bin/sst") - ? "node_modules/.bin/sst" - : "sst"; - shell(`${binary} remove --stage ${stage}`, { + const sstPath = findLocalSstBinary() ?? 
"sst"; + shell(`${sstPath} remove --stage ${stage}`, { + env: { + AWS_ACCESS_KEY_ID: credentials.accessKeyId, + AWS_SECRET_ACCESS_KEY: credentials.secretAccessKey, @@ -256,7 +252,7 @@ export async function handler(event, context) { new PutEventsCommand({ Entries: [ { - Source: "sst.external", + Source: "sst.runner", DetailType: type, Detail: JSON.stringify({ properties: { @@ -280,4 +276,17 @@ export async function handler(event, context) { dir = path.resolve(dir, ".."); } } + + function findLocalSstBinary() { + let searchPath = path.resolve(APP_PATH); + while (true) { + const sstPath = path.join(searchPath, "node_modules/.bin/sst"); + if (fs.existsSync(sstPath)) { + console.log("Using locally installed SST binary at", sstPath); + return sstPath; + } + if (searchPath === path.resolve(REPO_PATH) || path.resolve(searchPath, "..") === searchPath) break; + searchPath = path.resolve(searchPath, ".."); + } + } } diff --git a/packages/build/sst-env.d.ts b/packages/build/sst-env.d.ts new file mode 100644 index 00000000..b0900f6b --- /dev/null +++ b/packages/build/sst-env.d.ts @@ -0,0 +1,144 @@ +/* This file is auto-generated by SST. Do not edit.
*/ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +import "sst" +export {} +declare module "sst" { + export interface Resource { + "Alerts": { + "arn": string + "type": "sst.aws.SnsTopic" + } + "Api": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "ApiRouter": { + "type": "sst.aws.Router" + "url": string + } + "Auth": { + "publicKey": string + "type": "sst.aws.Auth" + } + "AuthAuthenticator": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "AuthRouter": { + "type": "sst.aws.Router" + "url": string + } + "AutodeployConfig": { + "buildImage": string + "buildspecBucketName": string + "buildspecVersion": string + "configParserFunctionArn": string + "runnerRemoverFunctionArn": string + "runnerRemoverScheduleGroupName": string + "runnerRemoverScheduleRoleArn": string + "timeoutMonitorFunctionArn": string + "timeoutMonitorScheduleGroupName": string + "timeoutMonitorScheduleRoleArn": string + "type": "sst.sst.Linkable" + } + "AutodeployConfigParser": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployRunnerRemover": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployTimeoutMonitor": { + "name": string + "type": "sst.aws.Function" + } + "BotpoisonSecretKey": { + "type": "sst.sst.Secret" + "value": string + } + "Bus": { + "arn": string + "name": string + "type": "sst.aws.Bus" + } + "Connect": { + "name": string + "type": "sst.aws.Function" + } + "Database": { + "database": string + "host": string + "password": string + "port": number + "type": "sst.sst.Linkable" + "username": string + } + "Email": { + "configSet": string + "sender": string + "type": "sst.aws.Email" + } + "Error": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "GithubAppID": { + "type": "sst.sst.Secret" + "value": string + } + "GithubPrivateKey": { + "type": "sst.sst.Secret" + "value": string + } + "GithubWebhookSecret": { + "type": "sst.sst.Secret" + "value": string + } + 
"IssueDestination": { + "prefix": string + "role": string + "stream": string + "type": "sst.sst.Linkable" + } + "IssueStream": { + "name": string + "type": "sst.aws.KinesisStream" + } + "SlackClientID": { + "type": "sst.sst.Secret" + "value": string + } + "SlackClientSecret": { + "type": "sst.sst.Secret" + "value": string + } + "Storage": { + "name": string + "type": "sst.aws.Bucket" + } + "Websocket": { + "http": string + "realtime": string + "token": string + "type": "sst.sst.Linkable" + } + "WebsocketAuthorizer": { + "name": string + "type": "sst.aws.Function" + } + "WebsocketToken": { + "type": "sst.sst.Secret" + "value": string + } + "Workspace": { + "type": "sst.aws.StaticSite" + "url": string + } + } +} diff --git a/packages/core/package.json b/packages/core/package.json index 928cf709..0d574324 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -14,7 +14,7 @@ "@types/react": "18.0.25", "@types/relaxed-json": "^1.0.1", "drizzle-kit": "0.22.8", - "sst": "2.43.4", + "sst": "3.3.2", "typescript": "^5.2.2", "vitest": "^0.25.3" }, diff --git a/packages/core/src/actor.ts b/packages/core/src/actor.ts index 09b50bb0..e460a664 100644 --- a/packages/core/src/actor.ts +++ b/packages/core/src/actor.ts @@ -1,5 +1,5 @@ -import { Context } from "sst/context/context2.js"; import { z } from "zod"; +import { createContext } from "./context"; export const PublicActor = z.object({ type: z.literal("public"), @@ -41,7 +41,7 @@ export const Actor = z.discriminatedUnion("type", [ ]); export type Actor = z.infer; -const ActorContext = Context.create("actor"); +const ActorContext = createContext("actor"); export const useActor = ActorContext.use; export const withActor = ActorContext.with; diff --git a/packages/core/src/alert/index.ts b/packages/core/src/alert/index.ts index e68272e4..d70a1774 100644 --- a/packages/core/src/alert/index.ts +++ b/packages/core/src/alert/index.ts @@ -75,7 +75,7 @@ export module Alert { .select() .from(alert) 
.where(eq(alert.workspaceID, useWorkspace())) - .execute() + .execute(), ); return alerts.filter( (alert) => @@ -83,9 +83,9 @@ export module Alert { (!stage || alert.source.stage === "*" || alert.source.stage.includes(stage)) && - events.includes(alert.event ?? "issue") + events.includes(alert.event ?? "issue"), ); - } + }, ); export const put = zod( @@ -120,19 +120,19 @@ export module Alert { and( eq(warning.workspaceID, useWorkspace()), eq(warning.type, "issue_alert_slack"), - eq(warning.target, id) - ) + eq(warning.target, id), + ), ); return id; - }) + }), ); export const remove = zod(Info.shape.id, (input) => useTransaction((tx) => tx .delete(alert) - .where(and(eq(alert.id, input), eq(alert.workspaceID, useWorkspace()))) - ) + .where(and(eq(alert.id, input), eq(alert.workspaceID, useWorkspace()))), + ), ); export const sendSlack = zod( @@ -170,7 +170,7 @@ export module Alert { }, }); } - } + }, ); export const sendEmail = zod( z.object({ @@ -193,12 +193,14 @@ export module Alert { ? 
undefined : inArray(user.id, destination.properties.users), isNull(user.timeDeleted), - isNotNull(user.timeSeen) - ) + isNotNull(user.timeSeen), + ), ); console.log( "sending email to", - users.map((u) => u.email) + users.map((u) => u.email), + "from", + fromAddress, ); if (!users.length) return; @@ -219,12 +221,12 @@ export module Alert { Subject: { Data: subject }, }, }, - }) + }), ); } catch (ex) { console.error(ex); } return; - }) + }), ); } diff --git a/packages/core/src/app/repo.ts b/packages/core/src/app/repo.ts index df18d1fd..913554e4 100644 --- a/packages/core/src/app/repo.ts +++ b/packages/core/src/app/repo.ts @@ -4,9 +4,10 @@ import { createTransactionEffect, useTransaction } from "../util/transaction"; import { appRepoTable } from "./app.sql"; import { useWorkspace } from "../actor"; import { createId } from "@paralleldrive/cuid2"; -import { and, eq, inArray } from "drizzle-orm"; -import { event } from "../event"; -import { Trigger } from "../run/run.sql"; +import { and, eq } from "drizzle-orm"; +import { createEvent } from "../event"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export module AppRepo { export const Repo = z.object({ @@ -24,17 +25,17 @@ export module AppRepo { export type Repo = z.infer; export const Events = { - Connected: event( + Connected: createEvent( "app.repo.connected", z.object({ appID: Repo.shape.appID, repoID: Repo.shape.repoID, - }) + }), ), }; export function serializeAppRepo( - input: typeof appRepoTable.$inferSelect + input: typeof appRepoTable.$inferSelect, ): Repo { return { id: input.id, @@ -58,12 +59,12 @@ export module AppRepo { .where( and( eq(appRepoTable.workspaceID, useWorkspace()), - eq(appRepoTable.id, id) - ) + eq(appRepoTable.id, id), + ), ) .execute() - .then((rows) => rows[0]) - ) + .then((rows) => rows[0]), + ), ); export const getByAppID = zod(Repo.shape.appID, (appID) => @@ -74,12 +75,12 @@ export module AppRepo { .where( and( eq(appRepoTable.workspaceID, useWorkspace()), - 
eq(appRepoTable.appID, appID) - ) + eq(appRepoTable.appID, appID), + ), ) .execute() - .then((rows) => rows[0]) - ) + .then((rows) => rows[0]), + ), ); export const connect = zod( @@ -103,12 +104,15 @@ export module AppRepo { repoID: input.repoID, }, }) - .execute() + .execute(), ); await createTransactionEffect(() => - Events.Connected.publish({ appID: input.appID, repoID: input.repoID }) + bus.publish(Resource.Bus, Events.Connected, { + appID: input.appID, + repoID: input.repoID, + }), ); - } + }, ); export const disconnect = zod(Repo.shape.id, (input) => @@ -118,11 +122,11 @@ export module AppRepo { .where( and( eq(appRepoTable.id, input), - eq(appRepoTable.workspaceID, useWorkspace()) - ) + eq(appRepoTable.workspaceID, useWorkspace()), + ), ) .execute(); - }) + }), ); export const putPath = zod( @@ -139,11 +143,11 @@ export module AppRepo { .where( and( eq(appRepoTable.id, input.id), - eq(appRepoTable.workspaceID, useWorkspace()) - ) + eq(appRepoTable.workspaceID, useWorkspace()), + ), ) - .execute() + .execute(), ); - } + }, ); } diff --git a/packages/core/src/app/resource.ts b/packages/core/src/app/resource.ts index ce6265d9..fe12aecc 100644 --- a/packages/core/src/app/resource.ts +++ b/packages/core/src/app/resource.ts @@ -1,6 +1,4 @@ export * as Resource from "./resource"; - -import { InferModel } from "drizzle-orm"; import { resource } from "./app.sql"; import { GetFunctionCommand, LambdaClient } from "@aws-sdk/client-lambda"; import { @@ -8,7 +6,7 @@ import { DescribeStacksCommand, } from "@aws-sdk/client-cloudformation"; import type { Credentials } from "../aws"; -import { event } from "../event"; +import { createEvent } from "../event"; import { z } from "zod"; import { zod } from "../util/zod"; import { useTransaction } from "../util/transaction"; @@ -17,15 +15,15 @@ import { useWorkspace } from "../actor"; import { RETRY_STRATEGY } from "../util/aws"; export const Events = { - Updated: event( + Updated: createEvent( "app.resource.updated", z.object({ 
- resourceID: z.string().nonempty(), - }) + resourceID: z.string().min(1), + }), ), }; -type Model = InferModel; +type Model = typeof resource.$inferSelect; type Metadata = | { @@ -158,15 +156,16 @@ export const Enrichers = { const info = await client.send( new GetFunctionCommand({ FunctionName: resource.data.arn, - }) + }), ); client.destroy(); return { size: info.Configuration?.CodeSize, runtime: info.Configuration?.Runtime, live: Boolean( - info.Configuration?.Environment?.Variables?.SST_FUNCTION_ID + info.Configuration?.Environment?.Variables?.SST_FUNCTION_ID, ), + logGroup: info.Configuration?.LoggingConfig?.LogGroup, }; }, async WebSocketApi() { @@ -181,19 +180,19 @@ export const Enrichers = { const result = await client.send( new DescribeStacksCommand({ StackName: resource.id, - }) + }), ); client.destroy(); const [stack] = result.Stacks || []; const parsed = JSON.parse( stack?.Outputs?.find((o) => o.OutputKey === "SSTMetadata")?.OutputValue || - "{}" + "{}", ); return { outputs: stack?.Outputs?.filter( (o) => - o.OutputKey !== "SSTMetadata" && !o.OutputKey?.startsWith("Export") + o.OutputKey !== "SSTMetadata" && !o.OutputKey?.startsWith("Export"), ) || [], version: parsed.version as string | undefined, } as const; @@ -205,24 +204,24 @@ export const Enrichers = { data: Extract["data"]; }, credentials: Credentials, - region: string + region: string, ) => Promise; }; -export const fromID = zod(z.string().nonempty(), (id) => +export const fromID = zod(z.string().min(1), (id) => useTransaction((tx) => tx .select() .from(resource) .where(and(eq(resource.workspaceID, useWorkspace()), eq(resource.id, id))) .execute() - .then((x) => x[0]) - ) + .then((x) => x[0]), + ), ); export const enrich = zod( z.object({ - resourceID: z.string().nonempty(), + resourceID: z.string().min(1), credentials: z.custom(), region: z.string(), }), @@ -238,15 +237,15 @@ export const enrich = zod( data: resource.metadata as any, }, input.credentials, - input.region + input.region, ); - }) 
+ }), ); export const listFromStageID = zod( z.object({ - stageID: z.string().nonempty(), - types: z.array(z.string().nonempty()), + stageID: z.string().min(1), + types: z.array(z.string().min(1)), }), (input) => useTransaction((tx) => @@ -257,10 +256,10 @@ export const listFromStageID = zod( and( eq(resource.workspaceID, useWorkspace()), eq(resource.stageID, input.stageID), - inArray(resource.type, input.types) - ) + inArray(resource.type, input.types), + ), ) .execute() - .then((rows) => rows as Info[]) - ) + .then((rows) => rows as Info[]), + ), ); diff --git a/packages/core/src/app/stage.ts b/packages/core/src/app/stage.ts index 36926dd8..48c064b4 100644 --- a/packages/core/src/app/stage.ts +++ b/packages/core/src/app/stage.ts @@ -19,37 +19,39 @@ import { } from "@aws-sdk/client-s3"; import { Enrichers, Resource } from "./resource"; import { db } from "../drizzle"; -import { event } from "../event"; +import { createEvent } from "../event"; import { Replicache } from "../replicache"; import { issueSubscriber } from "../issue/issue.sql"; - +import { bus } from "sst/aws/bus"; +import { Resource as SSTResource } from "sst"; +import { State } from "../state"; export * as Stage from "./stage"; export const Events = { - Connected: event( + Connected: createEvent( "app.stage.connected", z.object({ stageID: z.string().min(1), - }) + }), ), - Updated: event( + Updated: createEvent( "app.stage.updated", z.object({ stageID: z.string().min(1), - }) + }), ), - ResourcesUpdated: event( + ResourcesUpdated: createEvent( "app.stage.resources_updated", z.object({ stageID: z.string().min(1), - }) + }), ), - UsageRequested: event( + UsageRequested: createEvent( "app.stage.usage_requested", z.object({ stageID: z.string().min(1), daysOffset: z.number().int().min(1), - }) + }), ), }; @@ -70,8 +72,8 @@ export const fromID = zod(Info.shape.id, (stageID) => .from(stage) .where(and(eq(stage.workspaceID, useWorkspace()), eq(stage.id, stageID))) .execute() - .then((x) => x[0]) - ) + 
.then((x) => x[0]), + ), ); export const fromName = zod( @@ -93,12 +95,12 @@ export const fromName = zod( eq(stage.region, input.region), eq(stage.appID, input.appID), eq(stage.awsAccountID, input.awsAccountID), - isNull(stage.timeDeleted) - ) + isNull(stage.timeDeleted), + ), ) .execute() - .then((x) => x[0]) - ) + .then((x) => x[0]), + ), ); export const list = zod( @@ -120,294 +122,7 @@ export const list = zod( items, cursor: items.length < SIZE ? undefined : items.at(-1)?.id, }; - }) -); - -export const connect = zod( - Info.pick({ - name: true, - appID: true, - id: true, - awsAccountID: true, - region: true, - }).partial({ - id: true, - }), - (input) => - createTransaction(async (tx) => { - console.log({ input }); - const id = input.id ?? createId(); - const result = await tx - .insert(stage) - .values({ - id, - appID: input.appID, - workspaceID: useWorkspace(), - awsAccountID: input.awsAccountID, - name: input.name, - region: input.region, - }) - .onDuplicateKeyUpdate({ - set: { - awsAccountID: input.awsAccountID, - region: input.region, - timeDeleted: null, - }, - }) - .execute(); - console.log(result); - const { insertID } = await tx - .select({ insertID: stage.id }) - .from(stage) - .where( - and( - eq(stage.workspaceID, useWorkspace()), - eq(stage.appID, input.appID), - eq(stage.name, input.name), - eq(stage.region, input.region), - eq(stage.awsAccountID, input.awsAccountID) - ) - ) - .execute() - .then((x) => x[0]!); - await createTransactionEffect(() => - Events.Connected.publish({ - stageID: insertID, - }) - ); - return insertID; - }) -); - -export const syncMetadata = zod( - z.object({ - config: z.custom(), - remove: z.boolean().optional(), - }), - async (input) => { - console.log("syncing metadata", input.config.stageID); - const row = await db - .select({ - app: app.name, - stage: stage.name, - region: stage.region, - }) - .from(stage) - .innerJoin(app, eq(stage.appID, app.id)) - .where( - and( - eq(stage.id, input.config.stageID), - 
eq(stage.workspaceID, useWorkspace()) - ) - ) - .execute() - .then((x) => x[0]); - if (!row) { - return; - } - console.log(input.config.app, input.config.stage, input.config.region); - const bootstrap = await AWS.Account.bootstrap(input.config); - if (!bootstrap) return; - const resources = [] as { - [key in Resource.Info["type"]]: { - type: key; - id: string; - stackID: string; - addr: string; - data: Resource.InfoByType["metadata"]; - enrichment: Resource.InfoByType["enrichment"]; - }; - }[Resource.Info["type"]][]; - const s3 = new S3Client(input.config); - const key = `stackMetadata/app.${input.config.app}/stage.${input.config.stage}/`; - console.log("listing", key, "for", bootstrap.bucket); - const list = await s3 - .send( - new ListObjectsV2Command({ - Prefix: key, - Bucket: bootstrap.bucket, - }) - ) - .catch((err) => { - if (err.name === "AccessDenied") return; - if (err.name === "NoSuchBucket") return; - throw err; - }); - if (!list) { - console.log("could not list from bucket"); - return; - } - if (!list.Contents?.length && input.remove) { - const ion = await AWS.Account.bootstrapIon(input.config); - if (ion) { - const state = await s3 - .send( - new GetObjectCommand({ - Key: `app/${input.config.app}/${input.config.stage}.json`, - Bucket: ion.bucket, - }) - ) - .catch(() => {}); - if (state) return; - } - await remove(input.config.stageID); - return; - } - console.log("found", list.Contents?.length, "stacks"); - const results: any[] = []; - for (const obj of list.Contents || []) { - const stackID = obj.Key?.split("/").pop()!.split(".")[1]; - const result = await s3 - .send( - new GetObjectCommand({ - Key: obj.Key!, - Bucket: bootstrap.bucket, - }) - ) - .catch((err) => { - if (err.name === "AccessDenied") return; - if (err.name === "NoSuchBucket") return; - if (err.name === "NoSuchKey") return; - throw err; - }); - if (!result) continue; - const body = await result - .Body!.transformToString() - .then((x) => JSON.parse(x)); - const r = []; - body.push({ - 
type: "Stack", - id: stackID, - addr: stackID, - data: {}, - }); - for (let res of body) { - const { type } = res; - const enrichment = - type in Enrichers - ? await Enrichers[type as keyof typeof Enrichers]( - res, - input.config.credentials, - input.config.region - ).catch(() => ({})) - : {}; - r.push({ - ...res, - stackID, - enrichment, - }); - } - results.push(...r); - } - resources.push(...results); - s3.destroy(); - if (!resources.length) { - return; - } - - return createTransaction( - async (tx) => { - const existing = await tx - .select({ - id: resource.id, - addr: resource.addr, - }) - .from(resource) - .where( - and( - eq(resource.stageID, input.config.stageID), - eq(resource.workspaceID, useWorkspace()) - ) - ) - .execute() - .then((x) => new Map(x.map((x) => [x.addr, x.id] as const))); - if (resources.length) - await tx - .insert(resource) - .values( - resources.map((res) => { - const id = existing.get(res.addr) || createId(); - existing.delete(res.addr); - return { - workspaceID: useWorkspace(), - cfnID: res.id, - constructID: res.id, - addr: res.addr, - stackID: res.stackID, - stageID: input.config.stageID, - id, - type: res.type, - metadata: res.data, - enrichment: res.enrichment, - }; - }) - ) - .onDuplicateKeyUpdate({ - set: { - addr: sql`VALUES(addr)`, - stackID: sql`VALUES(stack_id)`, - type: sql`VALUES(type)`, - metadata: sql`VALUES(metadata)`, - enrichment: sql`VALUES(enrichment)`, - }, - }) - .execute(); - - const stacks = resources.filter((x) => x.type === "Stack"); - const unsupported = - stacks.length === - stacks.filter( - (x) => - // @ts-ignore - !x.enrichment.version || - // @ts-ignore - parseVersion(x.enrichment.version) < MINIMUM_VERSION - ).length; - - await tx - .update(stage) - .set({ unsupported }) - .where( - and( - eq(stage.id, input.config.stageID), - eq(stage.workspaceID, useWorkspace()) - ) - ); - - const toDelete = [...existing.values()]; - console.log("deleting", toDelete.length, "resources"); - if (toDelete.length) - await 
tx - .delete(resource) - .where( - and( - eq(resource.stageID, input.config.stageID), - eq(resource.workspaceID, useWorkspace()), - inArray(resource.id, toDelete) - ) - ); - - await tx - .update(stage) - .set({ timeUpdated: sql`CURRENT_TIMESTAMP(3)` }) - .where( - and( - eq(stage.id, input.config.stageID), - eq(stage.workspaceID, useWorkspace()) - ) - ); - await createTransactionEffect(() => Replicache.poke()); - await createTransactionEffect(() => - Events.ResourcesUpdated.publish({ - stageID: input.config.stageID, - }) - ); - }, - { - isolationLevel: "read committed", - } - ); - } + }), ); export type StageCredentials = Exclude< @@ -429,7 +144,7 @@ export const assumeRole = zod(Info.shape.id, async (stageID) => { .innerJoin(app, eq(stage.appID, app.id)) .where(and(eq(stage.id, stageID), eq(stage.workspaceID, useWorkspace()))) .execute() - .then((rows) => rows.at(0)) + .then((rows) => rows.at(0)), ); if (!result) return; const credentials = await AWS.assumeRole(result.accountID); @@ -454,7 +169,7 @@ export const remove = zod(Info.shape.id, (stageID) => timeDeleted: sql`CURRENT_TIMESTAMP(3)`, }) .where( - and(eq(stage.id, stageID), eq(stage.workspaceID, useWorkspace())) + and(eq(stage.id, stageID), eq(stage.workspaceID, useWorkspace())), ) .execute(); await tx @@ -462,8 +177,8 @@ export const remove = zod(Info.shape.id, (stageID) => .where( and( eq(resource.stageID, stageID), - eq(resource.workspaceID, useWorkspace()) - ) + eq(resource.workspaceID, useWorkspace()), + ), ) .execute(); await tx @@ -471,16 +186,16 @@ export const remove = zod(Info.shape.id, (stageID) => .where( and( eq(issueSubscriber.stageID, stageID), - eq(issueSubscriber.workspaceID, useWorkspace()) - ) + eq(issueSubscriber.workspaceID, useWorkspace()), + ), ) .execute(); await createTransactionEffect(() => Replicache.poke()); }, { isolationLevel: "read uncommitted", - } - ) + }, + ), ); function parseVersion(input: string) { diff --git a/packages/core/src/aws/account.ts 
b/packages/core/src/aws/account.ts index bee8d7fe..92e74e1b 100644 --- a/packages/core/src/aws/account.ts +++ b/packages/core/src/aws/account.ts @@ -1,17 +1,12 @@ export * as Account from "./account"; - import { createSelectSchema } from "drizzle-zod"; import { z } from "zod"; import { zod } from "../util/zod"; import { createId } from "@paralleldrive/cuid2"; -import { - createTransaction, - createTransactionEffect, - useTransaction, -} from "../util/transaction"; +import { createTransactionEffect, useTransaction } from "../util/transaction"; import { awsAccount } from "./aws.sql"; import { useWorkspace } from "../actor"; -import { and, eq, isNull, sql } from "drizzle-orm"; +import { and, eq, sql } from "drizzle-orm"; import { Credentials } from "."; import { CloudFormationClient, @@ -26,9 +21,6 @@ import { import { S3Client, PutBucketNotificationConfigurationCommand, - ListObjectsV2Command, - NoSuchBucket, - GetObjectCommand, } from "@aws-sdk/client-s3"; import { CreateRoleCommand, @@ -38,7 +30,6 @@ import { IAMClient, PutRolePolicyCommand, } from "@aws-sdk/client-iam"; -import { event } from "../event"; export const Info = createSelectSchema(awsAccount, { id: (schema) => schema.id.cuid2(), @@ -48,13 +39,13 @@ export const Info = createSelectSchema(awsAccount, { export type Info = z.infer; export const Events = { - Created: event( + Created: createEvent( "aws.account.created", z.object({ awsAccountID: z.string().cuid2(), }), ), - Removed: event( + Removed: createEvent( "aws.account.removed", z.object({ awsAccountID: z.string().cuid2(), @@ -97,7 +88,7 @@ export const create = zod( ) .then((rows) => rows.at(0)); await createTransactionEffect(() => - Events.Created.publish({ + bus.publish(Resource.Bus, Events.Created, { awsAccountID: existing!.id, }), ); @@ -119,7 +110,7 @@ export const scan = zod(Info.shape.id, (input) => ), ); await createTransactionEffect(() => - Events.Created.publish({ + bus.publish(Resource.Bus, Events.Created, { awsAccountID: input, }), ); @@ 
-207,7 +198,7 @@ export const bootstrap = zod( return { bucket, - version: "normal" as const, + version: "v2" as const, }; } @@ -244,11 +235,10 @@ export const bootstrapIon = zod( ) .catch(() => {}); if (!param?.Parameter?.Value) return; - console.log("found ion bucket", param.Parameter.Value); const parsed = JSON.parse(param.Parameter.Value); return { bucket: parsed.state, - version: "ion" as const, + version: "v3" as const, }; } catch { return; @@ -259,14 +249,15 @@ export const bootstrapIon = zod( ); import { DescribeRegionsCommand, EC2Client } from "@aws-sdk/client-ec2"; -import { App } from "../app"; import { Replicache } from "../replicache"; -import { Config } from "sst/node/config"; import { db } from "../drizzle"; -import { app, stage } from "../app/app.sql"; -import { createPipe, groupBy, mapValues } from "remeda"; +import { stage } from "../app/app.sql"; import { RETRY_STRATEGY } from "../util/aws"; import { GetParameterCommand, SSMClient } from "@aws-sdk/client-ssm"; +import { Resource } from "sst"; +import { createEvent } from "../event"; +import { bus } from "sst/aws/bus"; +import { State } from "../state"; export const regions = zod( bootstrap.schema.shape.credentials, @@ -306,7 +297,8 @@ export const integrate = zod( const iam = new IAMClient({ credentials: input.credentials, }); - const suffix = Config.STAGE !== "production" ? "-" + Config.STAGE : ""; + const suffix = + Resource.App.stage !== "production" ? 
"-" + Resource.App.stage : ""; const roleName = "SSTConsolePublisher" + suffix; await iam .send( @@ -360,7 +352,7 @@ export const integrate = zod( { Effect: "Allow", Action: ["events:PutEvents"], - Resource: [process.env.EVENT_BUS_ARN], + Resource: [Resource.Bus.arn], }, ], }), @@ -432,7 +424,7 @@ export const integrate = zod( Rule: "SSTConsole" + suffix, Targets: [ { - Arn: process.env.EVENT_BUS_ARN, + Arn: Resource.Bus.arn, Id: "SSTConsole", RoleArn: `arn:aws:iam::${account.accountID}:role/${roleName}`, }, @@ -440,168 +432,11 @@ export const integrate = zod( }), ); console.log(region, "created eventbus rule"); - - let token: string | undefined; - const existing = await useTransaction((tx) => - tx - .select({ - stageName: stage.name, - stageID: stage.id, - appName: app.name, - }) - .from(stage) - .innerJoin(app, eq(stage.appID, app.id)) - .where( - and( - eq(stage.awsAccountID, account.id), - eq(stage.region, region), - eq(stage.workspaceID, useWorkspace()), - isNull(stage.timeDeleted), - ), - ), - ).then( - createPipe( - groupBy((r) => r.appName), - mapValues((rows) => - rows.map((r) => [r.stageName, r.stageID] as const), - ), - mapValues((rows) => new Map(rows)), - ), - ); - - const stages = [] as { app: string; stage: string }[]; - for (const b of bootstrapBuckets) { - console.log("scanning", b.bucket); - while (true) { - if (b.version === "normal") { - const list = await s3 - .send( - new ListObjectsV2Command({ - Prefix: "stackMetadata", - Bucket: b.bucket, - ContinuationToken: token, - }), - ) - .catch((err) => { - if (err instanceof NoSuchBucket) { - console.log("couldn't find this bucket"); - return; - } - throw err; - }); - if (!list) break; - const distinct = new Set( - list.Contents?.filter((item) => item.Key).map((item) => - item.Key!.split("/").slice(0, 3).join("/"), - ) || [], - ); - - console.log("found", b.version, distinct); - for (const item of distinct) { - const [, appHint, stageHint] = item.split("/") || []; - if (!appHint || !stageHint) 
continue; - const [, stageName] = stageHint?.split("."); - const [, appName] = appHint?.split("."); - if (!stageName || !appName) continue; - stages.push({ - app: appName, - stage: stageName, - }); - existing[appName]?.delete(stageName); - console.log(region, "found", stageName, appName); - } - - if (!list.ContinuationToken) break; - token = list.ContinuationToken; - } - - if (b.version === "ion") { - const list = await s3 - .send( - new ListObjectsV2Command({ - Prefix: "app/", - Bucket: b.bucket, - ContinuationToken: token, - }), - ) - .catch((err) => { - if (err instanceof NoSuchBucket) { - console.log("couldn't find this bucket"); - return; - } - throw err; - }); - if (!list) break; - for (const item of list.Contents || []) { - const key = item.Key; - if (!key) continue; - if (!key.endsWith(".json")) continue; - const splits = key.split("/"); - const appName = splits.at(-2); - const stageName = splits.at(-1)?.split(".").at(0); - if (!appName || !stageName) continue; - const state = await s3 - .send( - new GetObjectCommand({ - Bucket: b.bucket, - Key: key, - }), - ) - .then( - async (result) => - JSON.parse(await result.Body!.transformToString()) - .checkpoint.latest || {}, - ) - .catch(() => {}); - if (!state) continue; - if (!state.resources) continue; - if (state.resources.length === 0) continue; - existing[appName]?.delete(stageName); - stages.push({ - app: appName!, - stage: stageName!, - }); - } - if (!list.ContinuationToken) break; - token = list.ContinuationToken; - } - } - } - for (const item of stages) { - console.log("found stage", item); - await createTransaction(async () => { - let app = await App.fromName(item.app).then((a) => a?.id); - if (!app) { - console.log("creating app", item.app); - app = await App.create({ - name: item.app, - }); - } - - let stage = await App.Stage.fromName({ - appID: app, - name: item.stage, - region, - awsAccountID: input.awsAccountID, - }).then((s) => s?.id); - if (!stage) { - console.log("connecting stage", item.app, 
item.stage); - stage = await App.Stage.connect({ - name: item.stage, - appID: app, - region: config.region, - awsAccountID: account.id, - }); - await Replicache.poke(); - } - }); - } - for (const [appName, stages] of Object.entries(existing)) { - for (const [stageName, stageID] of stages) { - console.log("could not find", appName, stageName, stageID); - await App.Stage.remove(stageID); - } - } + await State.scan({ + awsAccountID: input.awsAccountID, + credentials: input.credentials, + region, + }); } await db .update(awsAccount) @@ -615,7 +450,6 @@ export const integrate = zod( ), ); await Replicache.poke(); - console.log("done"); }, ); diff --git a/packages/core/src/aws/index.ts b/packages/core/src/aws/index.ts index c7556489..afe49087 100644 --- a/packages/core/src/aws/index.ts +++ b/packages/core/src/aws/index.ts @@ -24,7 +24,7 @@ export const assumeRole = zod(z.string(), async (id) => { RoleSessionName: "sst", ExternalId: workspaceID, DurationSeconds: 3600, - }) + }), ); await useTransaction((tx) => tx @@ -35,10 +35,10 @@ export const assumeRole = zod(z.string(), async (id) => { .where( and( eq(awsAccount.accountID, id), - eq(awsAccount.workspaceID, workspaceID) - ) + eq(awsAccount.workspaceID, workspaceID), + ), ) - .execute() + .execute(), ); return { secretAccessKey: result.Credentials!.SecretAccessKey!, @@ -57,10 +57,10 @@ export const assumeRole = zod(z.string(), async (id) => { .where( and( eq(awsAccount.accountID, id), - eq(awsAccount.workspaceID, workspaceID) - ) + eq(awsAccount.workspaceID, workspaceID), + ), ) - .execute() + .execute(), ); return; } diff --git a/packages/core/src/context.ts b/packages/core/src/context.ts new file mode 100644 index 00000000..9b6bb780 --- /dev/null +++ b/packages/core/src/context.ts @@ -0,0 +1,17 @@ +import { AsyncLocalStorage } from "node:async_hooks"; + +export function createContext(name: string) { + const storage = new AsyncLocalStorage(); + return { + use() { + const result = storage.getStore(); + if (!result) { + 
throw new Error("Context not provided: " + name); + } + return result; + }, + with(value: T, fn: () => R) { + return storage.run(value, fn); + }, + }; +} diff --git a/packages/core/src/drizzle/index.ts b/packages/core/src/drizzle/index.ts index 3d700dff..0a39d2b5 100644 --- a/packages/core/src/drizzle/index.ts +++ b/packages/core/src/drizzle/index.ts @@ -1,13 +1,13 @@ import { drizzle } from "drizzle-orm/planetscale-serverless"; -import { Client, connect } from "@planetscale/database"; -import { Config } from "sst/node/config"; +import { Client } from "@planetscale/database"; +import { Resource } from "sst"; import { fetch } from "undici"; export * from "drizzle-orm"; const client = new Client({ - host: "aws.connect.psdb.cloud", - username: Config.PLANETSCALE_USERNAME, - password: Config.PLANETSCALE_PASSWORD, + host: Resource.Database.host, + username: Resource.Database.username, + password: Resource.Database.password, fetch, }); diff --git a/packages/core/src/event.ts b/packages/core/src/event.ts index 50b0f189..fa37432f 100644 --- a/packages/core/src/event.ts +++ b/packages/core/src/event.ts @@ -1,10 +1,10 @@ -import { createEventBuilder, ZodValidator } from "sst/node/event-bus"; +import { event } from "sst/event"; +import { ZodValidator } from "sst/event/validator"; import { useActor } from "./actor"; -export const event = createEventBuilder({ - bus: "bus", +export const createEvent = event.builder({ validator: ZodValidator, - metadataFn() { + metadata() { return { actor: useActor(), }; diff --git a/packages/core/src/git/github.ts b/packages/core/src/git/github.ts index bb6c0a40..70df0db0 100644 --- a/packages/core/src/git/github.ts +++ b/packages/core/src/git/github.ts @@ -6,20 +6,12 @@ import { githubOrgTable, githubRepoTable } from "./git.sql"; import { useWorkspace } from "../actor"; import { createId } from "@paralleldrive/cuid2"; import { and, eq, ne, inArray, isNull, notInArray, or, sql } from "../drizzle"; -import { Config } from "sst/node/config"; 
-import { event } from "../event"; +import { createEvent } from "../event"; import { appRepoTable } from "../app/app.sql"; +import { Resource } from "sst"; +import { bus } from "sst/aws/bus"; export module Github { - export const Events = { - Installed: event( - "github.installed", - z.object({ - installationID: z.number().int(), - }) - ), - }; - export const Org = z.object({ id: z.string().cuid2(), externalOrgID: z.number().int(), @@ -93,8 +85,8 @@ export module Github { } function createClient(installationID: number) { const app = new App({ - appId: Config.GITHUB_APP_ID, - privateKey: Config.GITHUB_PRIVATE_KEY, + appId: Resource.GithubAppID.value, + privateKey: Resource.GithubPrivateKey.value, }); return app.getInstallationOctokit(installationID); } @@ -175,7 +167,6 @@ export module Github { ) ); }); - await Events.Installed.publish({ installationID }); } ); diff --git a/packages/core/src/issue/extract.ts b/packages/core/src/issue/extract.ts index dda03ab2..d48bd4f6 100644 --- a/packages/core/src/issue/extract.ts +++ b/packages/core/src/issue/extract.ts @@ -17,6 +17,8 @@ import { } from "../util/transaction"; import { zod } from "../util/zod"; import { issueCount, issue, issueSubscriber } from "./issue.sql"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export const extract = zod( z.custom<(typeof Events.ErrorDetected.$output.records)[number]>(), @@ -24,7 +26,7 @@ export const extract = zod( // do not process self if ( input.logGroup.startsWith( - "/aws/lambda/production-console-Issues-issuesConsumer", + "/aws/lambda/console-production-IssueStreamSubscriber", ) ) return; @@ -93,23 +95,8 @@ export const extract = zod( } if (count > 10_000) { - const limited = await db - .select({ - workspaceID: issueSubscriber.workspaceID, - stageID: issueSubscriber.stageID, - }) - .from(issueSubscriber) - .where( - and( - inArray( - issueSubscriber.workspaceID, - workspaces.map((x) => x.workspaceID), - ), - eq(issueSubscriber.logGroup, input.logGroup), - ), 
- ); await Promise.all( - limited.map((row) => + workspaces.map((row) => withActor( { type: "system", @@ -118,7 +105,7 @@ export const extract = zod( }, }, () => - Events.RateLimited.publish({ + bus.publish(Resource.Bus, Events.RateLimited, { stageID: row.stageID, logGroup: input.logGroup, }), @@ -353,7 +340,7 @@ export const extract = zod( }, }, () => - Events.IssueDetected.publish({ + bus.publish(Resource.Bus, Events.IssueDetected, { stageID: item.workspace.stageID, group: item.group, }), diff --git a/packages/core/src/issue/index.ts b/packages/core/src/issue/index.ts index 7421f935..0477e036 100644 --- a/packages/core/src/issue/index.ts +++ b/packages/core/src/issue/index.ts @@ -2,14 +2,13 @@ export * as Issue from "./index"; export * from "./extract"; import { useActor, useWorkspace } from "../actor"; -import { and, db, eq, inArray, lt, not, sql } from "../drizzle"; +import { and, db, eq, inArray, lt, sql } from "../drizzle"; import { issue, issueAlertLimit, issueCount as issueCount, issueSubscriber, } from "./issue.sql"; -import { createId } from "@paralleldrive/cuid2"; import { zod } from "../util/zod"; import { createSelectSchema } from "drizzle-zod"; import { @@ -21,20 +20,18 @@ import { PutSubscriptionFilterCommand, ResourceAlreadyExistsException, ResourceNotFoundException, - DeleteDestinationCommand, DeleteSubscriptionFilterCommand, } from "@aws-sdk/client-cloudwatch-logs"; -import { Resource } from "../app/resource"; +import { Resource as SSTResource } from "sst"; import { z } from "zod"; import { RETRY_STRATEGY } from "../util/aws"; import { Stage, StageCredentials } from "../app/stage"; -import { event } from "../event"; -import { Config } from "sst/node/config"; +import { createEvent } from "../event"; import { Warning } from "../warning"; import { createTransaction, useTransaction } from "../util/transaction"; import { Log } from "../log"; import { stateResourceTable } from "../state/state.sql"; -import * as Send from "./send"; +import { filter, map, 
pipe, unique } from "remeda"; export const Info = createSelectSchema(issue, {}); export type Info = typeof issue.$inferSelect; @@ -42,7 +39,7 @@ export type Count = typeof issueCount.$inferSelect; export * as Send from "./send"; export const Events = { - ErrorDetected: event( + ErrorDetected: createEvent( "issue.error_detected", z.object({ records: z @@ -61,21 +58,21 @@ export const Events = { .array(), }), ), - RateLimited: event( + RateLimited: createEvent( "issue.rate_limited", z.object({ stageID: z.string(), logGroup: z.string(), }), ), - IssueDetected: event( + IssueDetected: createEvent( "issue.detected", z.object({ stageID: z.string(), group: z.string(), }), ), - SubscribeRequested: event( + SubscribeRequested: createEvent( "issue.subscribe_requested", z.object({ stageID: z.string(), @@ -155,8 +152,8 @@ export const connectStage = zod( "creating", config.region, uniqueIdentifier, - Config.ISSUES_ROLE_ARN, - Config.ISSUES_STREAM_ARN, + SSTResource.IssueDestination.role, + SSTResource.IssueDestination.stream, ); const cw = new CloudWatchLogsClient({ region: config.region, @@ -167,8 +164,8 @@ export const connectStage = zod( const destination = await cw.send( new PutDestinationCommand({ destinationName: uniqueIdentifier, - roleArn: Config.ISSUES_ROLE_ARN, - targetArn: Config.ISSUES_STREAM_ARN, + roleArn: SSTResource.IssueDestination.role, + targetArn: SSTResource.IssueDestination.stream, }), ); console.log("created destination", destination.destination); @@ -198,294 +195,14 @@ export const connectStage = zod( }, ); -/** @deprecated */ -const disconnectStage = zod(z.custom(), async (config) => { - const uniqueIdentifier = destinationIdentifier(config); - console.log("deleting", uniqueIdentifier); - const cw = new CloudWatchLogsClient({ - region: config.region, - credentials: { - accessKeyId: process.env.AWS_ACCESS_KEY_ID!, - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!, - sessionToken: process.env.AWS_SESSION_TOKEN!, - }, - // retryStrategy: 
RETRY_STRATEGY, - }); - - try { - await cw.send( - new DeleteDestinationCommand({ - destinationName: uniqueIdentifier, - }), - ); - return true; - } catch (ex: any) { - if (ex instanceof ResourceNotFoundException) return false; - if (ex.name === "ThrottlingException") return false; - throw ex; - } finally { - cw.destroy(); - } -}); - -export const subscribe = zod(z.custom(), async (config) => { - const uniqueIdentifier = destinationIdentifier(config); - console.log("subscribing", uniqueIdentifier); - const destination = - Config.ISSUES_DESTINATION_PREFIX.replace("", config.region) + - uniqueIdentifier; - const cw = new CloudWatchLogsClient({ - region: config.region, - credentials: config.credentials, - retryStrategy: RETRY_STRATEGY, - }); - - try { - // Get all function resources - const resources = await Resource.listFromStageID({ - stageID: config.stageID, - types: ["Function", "NextjsSite"], - }); - const functions = resources.filter( - (x) => x.type === "Function" && !x.enrichment.live, - ); - if (!functions.length) { - console.log("no functions"); - return; - } - - const toDelete = await db - .select({ - id: issueSubscriber.id, - logGroup: issueSubscriber.logGroup, - }) - .from(issueSubscriber) - .where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, config.stageID), - not( - inArray( - issueSubscriber.functionID, - functions.map((x) => x.id), - ), - ), - ), - ); - - for (const item of toDelete) { - console.log("deleting", item.logGroup); - await cw - .send( - new DeleteSubscriptionFilterCommand({ - filterName: - uniqueIdentifier + (Config.STAGE === "production" ? 
"" : `#dev`), - logGroupName: item.logGroup!, - }), - ) - .catch((e) => { - if (e instanceof ResourceNotFoundException) return; - throw e; - }); - - await db - .delete(issueSubscriber) - .where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.id, item.id), - ), - ); - } - - await db.delete(issueSubscriber).where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, config.stageID), - not( - inArray( - issueSubscriber.functionID, - functions.map((x) => x.id), - ), - ), - ), - ); - - const exists = await db - .select({ - functionID: issueSubscriber.functionID, - logGroup: issueSubscriber.logGroup, - }) - .from(issueSubscriber) - .where( - and( - eq(issueSubscriber.stageID, config.stageID), - eq(issueSubscriber.workspaceID, useWorkspace()), - ), - ) - .execute(); - - console.log("updating", resources.length, "functions"); - async function subscribe(logGroup: string, functionID: string) { - if ( - exists.find( - (item) => - item.functionID === functionID && item.logGroup === logGroup, - ) - ) - return; - console.log("subscribing", logGroup); - while (true) { - try { - await cw.send( - new PutSubscriptionFilterCommand({ - destinationArn: destination, - filterName: - uniqueIdentifier + - (Config.STAGE === "production" ? "" : `#dev`), - filterPattern: [ - `?"Invoke Error"`, - // OOM and other runtime error - `?"Error: Runtime exited"`, - // Timeout - `?"Task timed out after"`, - // NodeJS Uncaught and console.error - `?"\tERROR\t"`, - `?"[ERROR]"`, - // ...(fn.enrichment.runtime?.startsWith("nodejs") - // ? 
[`?"\tERROR\t"`] - // : []), - ].join(" "), - logGroupName: logGroup, - }), - ); - - if (functionID) - await db - .insert(issueSubscriber) - .ignore() - .values({ - stageID: config.stageID, - workspaceID: useWorkspace(), - functionID: functionID, - id: createId(), - logGroup, - }) - .execute(); - - await Warning.remove({ - target: logGroup, - type: "log_subscription", - stageID: config.stageID, - }); - - break; - } catch (e: any) { - // Create log group if the function has never been invoked - if ( - e instanceof ResourceNotFoundException && - e.message.startsWith("The specified log group does not exist") - ) { - console.log("creating log group"); - await cw - .send( - new CreateLogGroupCommand({ - logGroupName: logGroup, - }), - ) - .catch((e) => { - if (e instanceof ResourceAlreadyExistsException) return; - throw e; - }); - continue; - } - - // There are too many log subscribers - if (e instanceof LimitExceededException) { - await Warning.create({ - stageID: config.stageID, - target: logGroup, - type: "log_subscription", - data: { - error: "limited", - }, - }); - break; - } - - // Permissions issue - if (e.name === "AccessDeniedException") { - await Warning.create({ - stageID: config.stageID, - target: logGroup, - type: "log_subscription", - data: { - error: "permissions", - }, - }); - break; - } - - // The destination hasn't been created yet so try again - if ( - e instanceof ResourceNotFoundException && - e.message === "The specified destination does not exist." 
- ) { - await connectStage(config); - continue; - } - - console.error(e); - await Warning.create({ - stageID: config.stageID, - target: logGroup, - type: "log_subscription", - data: { - error: "unknown", - message: e.toString(), - }, - }); - break; - } - } - } - - for (const resource of resources) { - if (resource.type === "Function") { - if (resource.enrichment.live) continue; - const logGroup = `/aws/lambda/${resource.metadata.arn.split(":")[6]}`; - await subscribe(logGroup, resource.id); - } - - if (resource.type === "NextjsSite") { - const routes = resource.metadata.routes?.data; - if (!routes) continue; - const fn = resources.find( - (r) => - r.type === "Function" && - r.metadata.arn === resource.metadata.server, - ); - if (!fn) continue; - - for (const route of routes) { - const logGroup = - resource.metadata.routes?.logGroupPrefix + route.logGroupPath; - await subscribe(logGroup, fn.id); - } - } - } - } finally { - cw.destroy(); - } -}); - export const subscribeIon = zod( z.custom(), async (config) => { const uniqueIdentifier = destinationIdentifier(config); console.log("subscribing", uniqueIdentifier); + await connectStage(config); const destination = - Config.ISSUES_DESTINATION_PREFIX.replace("", config.region) + + SSTResource.IssueDestination.prefix.replace("", config.region) + uniqueIdentifier; const cw = new CloudWatchLogsClient({ region: config.region, @@ -494,106 +211,42 @@ export const subscribeIon = zod( }); try { - // Get all function resources const resources = await db .select() .from(stateResourceTable) .where( and( - eq(stateResourceTable.type, "aws:lambda/function:Function"), + inArray(stateResourceTable.type, [ + "aws:lambda/function:Function", + "aws:cloudwatch/logGroup:LogGroup", + ]), eq(stateResourceTable.workspaceID, useWorkspace()), eq(stateResourceTable.stageID, config.stageID), ), ); - const functions = resources.filter( - (r) => !Boolean(r.outputs?.environment?.variables?.SST_FUNCTION_ID), - ); - if (!functions.length) { - 
console.log("no functions"); - return; - } - - const toDelete = await db - .select({ - id: issueSubscriber.id, - logGroup: issueSubscriber.logGroup, - }) - .from(issueSubscriber) - .where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, config.stageID), - not( - inArray( - issueSubscriber.functionID, - functions.map((x) => x.id), - ), - ), - ), - ); - - for (const item of toDelete) { - console.log("deleting", item.logGroup); - await cw - .send( - new DeleteSubscriptionFilterCommand({ - filterName: - uniqueIdentifier + - (Config.STAGE === "production" ? "" : `#dev`), - logGroupName: item.logGroup!, - }), - ) - .catch((e) => { - if (e instanceof ResourceNotFoundException) return; - throw e; - }); - await db - .delete(issueSubscriber) - .where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.id, item.id), - ), - ); - } - - await db.delete(issueSubscriber).where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, config.stageID), - not( - inArray( - issueSubscriber.functionID, - functions.map((x) => x.id), - ), - ), - ), + const groups = pipe( + resources, + map((resource): string | undefined => { + if ( + resource.type === "aws:lambda/function:Function" && + resource.outputs.loggingConfig + ) { + return resource.outputs.loggingConfig.logGroup; + } + if (resource.type === "aws:cloudwatch/logGroup:LogGroup") { + return resource.outputs.name; + } + }), + filter(Boolean), + unique(), ); - const exists = await db - .select({ - functionID: issueSubscriber.functionID, - logGroup: issueSubscriber.logGroup, - }) - .from(issueSubscriber) - .where( - and( - eq(issueSubscriber.stageID, config.stageID), - eq(issueSubscriber.workspaceID, useWorkspace()), - ), - ) - .execute(); + for (const group of groups) { + await subscribe(group as string); + } - console.log("updating", resources.length, "functions"); - async function subscribe(logGroup: string, functionID: string) { - if 
( - exists.find( - (item) => - item.functionID === functionID && item.logGroup === logGroup, - ) - ) - return; + async function subscribe(logGroup: string) { console.log("subscribing", logGroup); while (true) { try { @@ -602,7 +255,7 @@ export const subscribeIon = zod( destinationArn: destination, filterName: uniqueIdentifier + - (Config.STAGE === "production" ? "" : `#dev`), + (SSTResource.App.stage === "production" ? "" : `#dev`), filterPattern: [ `?"Invoke Error"`, // OOM and other runtime error @@ -620,19 +273,6 @@ export const subscribeIon = zod( }), ); - if (functionID) - await db - .insert(issueSubscriber) - .ignore() - .values({ - stageID: config.stageID, - workspaceID: useWorkspace(), - functionID: functionID, - id: createId(), - logGroup, - }) - .execute(); - await Warning.remove({ target: logGroup, type: "log_subscription", @@ -725,12 +365,6 @@ export const subscribeIon = zod( } } } - - for (const fn of functions) { - const logGroup = fn.outputs?.loggingConfig?.logGroup; - if (!logGroup) continue; - await subscribe(logGroup, fn.id); - } } finally { cw.destroy(); } @@ -767,7 +401,8 @@ export const disableLogGroup = zod( .send( new DeleteSubscriptionFilterCommand({ filterName: - uniqueIdentifier + (Config.STAGE === "production" ? "" : `#dev`), + uniqueIdentifier + + (SSTResource.App.stage === "production" ? 
"" : `#dev`), logGroupName: input.logGroup, }), ) @@ -796,34 +431,6 @@ export const disableLogGroup = zod( }, ); -/** @deprecated */ -export const unsubscribe = zod(z.custom(), async (config) => { - const disconnected = await disconnectStage(config); - if (!disconnected) return; - - await db - .delete(issueSubscriber) - .where( - and( - eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, config.stageID), - ), - ) - .execute(); - await Warning.create({ - target: "none", - type: "issue_rate_limited", - stageID: config.stageID, - data: {}, - }); - await Send.triggerRateLimit({ - app: config.app, - stage: config.stage, - stageID: config.stageID, - }); - return; -}); - export async function cleanup() { { const result = await db diff --git a/packages/core/src/issue/issue.sql.ts b/packages/core/src/issue/issue.sql.ts index 9c23cec7..19c3928f 100644 --- a/packages/core/src/issue/issue.sql.ts +++ b/packages/core/src/issue/issue.sql.ts @@ -49,7 +49,7 @@ export const issue = mysqlTable( primary: primaryKey({ columns: [table.workspaceID, table.id] }), group: unique("group").on(table.workspaceID, table.stageID, table.group), updated: index("updated").on(table.workspaceID, table.timeUpdated), - }) + }), ); export const issueSubscriber = mysqlTable( @@ -69,9 +69,9 @@ export const issueSubscriber = mysqlTable( table.workspaceID, table.stageID, table.functionID, - table.logGroup + table.logGroup, ), - }) + }), ); export const issueCount = mysqlTable( @@ -93,9 +93,9 @@ export const issueCount = mysqlTable( table.workspaceID, table.stageID, table.group, - table.hour + table.hour, ), - }) + }), ); export const issueAlertLimit = mysqlTable( @@ -106,5 +106,5 @@ export const issueAlertLimit = mysqlTable( }, (table) => ({ primary: primaryKey({ columns: [table.workspaceID, table.id] }), - }) + }), ); diff --git a/packages/core/src/issue/send.ts b/packages/core/src/issue/send.ts index 8778e26e..9a1e18ad 100644 --- a/packages/core/src/issue/send.ts +++ 
b/packages/core/src/issue/send.ts @@ -1,4 +1,14 @@ -import { eq, and, isNull, gt, sql, getTableColumns, or, lt } from "drizzle-orm"; +import { + eq, + and, + isNull, + gt, + sql, + getTableColumns, + or, + lt, + isNotNull, +} from "drizzle-orm"; import { useWorkspace } from "../actor"; import { app, stage } from "../app/app.sql"; import { db } from "../drizzle"; @@ -13,6 +23,8 @@ import { render } from "@jsx-email/render"; import type { KnownBlock } from "@slack/web-api"; import { Workspace } from "../workspace"; import { Alert } from "../alert"; +import { useTransaction } from "../util/transaction"; +import { Resource } from "sst"; export const Limit = createSelectSchema(issueAlertLimit); @@ -23,48 +35,67 @@ export const triggerIssue = zod( }), async (input) => { console.log("triggering issue", input); - const result = await db - .select({ - ...getTableColumns(issue), - slug: workspace.slug, - appName: app.name, - stageName: stage.name, - workspaceSlug: workspace.slug, - }) - .from(issue) - .innerJoin(workspace, eq(workspace.id, issue.workspaceID)) - .innerJoin( - stage, - and(eq(stage.id, issue.stageID), eq(stage.workspaceID, useWorkspace())) - ) - .innerJoin( - app, - and(eq(app.id, stage.appID), eq(app.workspaceID, useWorkspace())) - ) - .leftJoin( - issueAlertLimit, - and( - eq(issueAlertLimit.workspaceID, useWorkspace()), - eq(issueAlertLimit.id, issue.id) + const result = await useTransaction(async (tx) => { + await tx + .select() + .from(issue) + .for("update") + .where( + and( + eq(issue.workspaceID, useWorkspace()), + eq(issue.stageID, input.stageID), + eq(issue.group, input.group), + ), + ); + return await tx + .select({ + ...getTableColumns(issue), + slug: workspace.slug, + appName: app.name, + stageName: stage.name, + workspaceSlug: workspace.slug, + }) + .from(issue) + .innerJoin(workspace, eq(workspace.id, issue.workspaceID)) + .innerJoin( + stage, + and( + eq(stage.id, issue.stageID), + eq(stage.workspaceID, useWorkspace()), + ), ) - ) - .where( - 
and( - eq(issue.workspaceID, useWorkspace()), - eq(issue.stageID, input.stageID), - eq(issue.group, input.group), - or( - // alert first time - isNull(issueAlertLimit.timeUpdated), - // do not alert more than once every 30min - lt(issueAlertLimit.timeUpdated, sql`NOW() - INTERVAL 30 MINUTE`), - // if issue resolved after last alert, send alert - gt(issue.timeResolved, issueAlertLimit.timeUpdated) + .innerJoin( + app, + and(eq(app.id, stage.appID), eq(app.workspaceID, useWorkspace())), + ) + .leftJoin( + issueAlertLimit, + and( + eq(issueAlertLimit.workspaceID, useWorkspace()), + eq(issueAlertLimit.id, issue.id), ), - isNull(issue.timeIgnored) ) - ) - .then((rows) => rows[0]); + .where( + and( + eq(issue.workspaceID, useWorkspace()), + eq(issue.stageID, input.stageID), + eq(issue.group, input.group), + or( + // alert first time + isNull(issueAlertLimit.timeUpdated), + // do not alert more than once every 30min + lt(issueAlertLimit.timeUpdated, sql`NOW() - INTERVAL 30 MINUTE`), + // if issue resolved after last alert, send alert + and( + isNotNull(issue.timeResolved), + gt(issue.timeResolved, issueAlertLimit.timeUpdated), + ), + ), + isNull(issue.timeIgnored), + ), + ) + .then((rows) => rows[0]); + }); if (!result) { console.log("not alertable"); @@ -147,11 +178,11 @@ export const triggerIssue = zod( assetsUrl: `https://console.sst.dev/email`, consoleUrl: "https://console.sst.dev", workspace: result.workspaceSlug, - }) + }), ), plain: result.message, - replyToAddress: `alert+issues+${result.id}@${process.env.EMAIL_DOMAIN}`, - fromAddress: `${result.appName}/${result.stageName} via SST `, + replyToAddress: `alert+issues+${result.id}@${Resource.Email.sender}`, + fromAddress: `${result.appName}/${result.stageName} via SST `, }); } } @@ -168,7 +199,7 @@ export const triggerIssue = zod( timeUpdated: sql`NOW()`, }, }); - } + }, ); export const triggerRateLimit = zod( @@ -234,7 +265,7 @@ export const triggerRateLimit = zod( 
assetsUrl: `https://console.sst.dev/email`, consoleUrl: "https://console.sst.dev", workspace: workspace!.slug, - }) + }), ), plain: message, replyToAddress: `alert+issues@${process.env.EMAIL_DOMAIN}`, @@ -242,5 +273,5 @@ export const triggerRateLimit = zod( }); } } - } + }, ); diff --git a/packages/core/src/log/index.ts b/packages/core/src/log/index.ts index b78aff9e..cf17c210 100644 --- a/packages/core/src/log/index.ts +++ b/packages/core/src/log/index.ts @@ -55,6 +55,8 @@ export interface Log { message: string; } +export type LogEntry = Invocation | Log; + export type LogEvent = LogEventBase & ( | { diff --git a/packages/core/src/log/search.ts b/packages/core/src/log/search.ts index 4dda3aef..aada50bb 100644 --- a/packages/core/src/log/search.ts +++ b/packages/core/src/log/search.ts @@ -1,13 +1,14 @@ import { z } from "zod"; -import { event } from "../event"; +import { createEvent } from "../event"; import { createTransactionEffect, useTransaction } from "../util/transaction"; import { zod } from "../util/zod"; import { log_search } from "./log.sql"; import { assertActor, useWorkspace } from "../actor"; import { createSelectSchema } from "drizzle-zod"; import { and, eq, lt, sql } from "drizzle-orm"; -import { sqliteView } from "drizzle-orm/sqlite-core"; import { db } from "../drizzle"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export * as Search from "./search"; export const Info = createSelectSchema(log_search, { @@ -16,11 +17,11 @@ export const Info = createSelectSchema(log_search, { export type Info = z.infer; export const Events = { - Created: event( + Created: createEvent( "log.search.created", z.object({ id: z.string().cuid2(), - }) + }), ), }; @@ -30,11 +31,11 @@ export const fromID = zod(Info.shape.id, (id) => .select() .from(log_search) .where( - and(eq(log_search.id, id), eq(log_search.workspaceID, useWorkspace())) + and(eq(log_search.id, id), eq(log_search.workspaceID, 
useWorkspace())), ) .execute() - .then((rows) => rows.at(0)) - ) + .then((rows) => rows.at(0)), + ), ); export const search = zod( @@ -71,11 +72,11 @@ export const search = zod( }) .execute(); await createTransactionEffect(() => - Events.Created.publish({ + bus.publish(Resource.Bus, Events.Created, { id: input.id, - }) + }), ); - }) + }), ); export const setStart = zod( @@ -94,11 +95,11 @@ export const setStart = zod( .where( and( eq(log_search.id, input.id), - eq(log_search.workspaceID, useWorkspace()) - ) + eq(log_search.workspaceID, useWorkspace()), + ), ) - .execute() - ) + .execute(), + ), ); export const complete = zod( @@ -116,11 +117,11 @@ export const complete = zod( .where( and( eq(log_search.id, input.id), - eq(log_search.workspaceID, useWorkspace()) - ) + eq(log_search.workspaceID, useWorkspace()), + ), ) - .execute() - ) + .execute(), + ), ); export async function cleanup() { diff --git a/packages/core/src/realtime/index.ts b/packages/core/src/realtime/index.ts index 56062a0d..6b452d65 100644 --- a/packages/core/src/realtime/index.ts +++ b/packages/core/src/realtime/index.ts @@ -3,15 +3,15 @@ import { IoTDataPlaneClient, PublishCommand, } from "@aws-sdk/client-iot-data-plane"; -import { Config } from "sst/node/config"; import { useWorkspace } from "../actor"; +import { Resource } from "sst"; const data = new IoTDataPlaneClient({}); export async function publish( topic: string, properties: any, - profileID?: string + profileID?: string, ) { const workspaceID = useWorkspace(); await data.send( @@ -20,11 +20,11 @@ export async function publish( JSON.stringify({ properties, workspaceID, - }) + }), ), - topic: `console/${Config.STAGE}/${workspaceID}/${ + topic: `console/${Resource.App.stage}/${workspaceID}/${ profileID ? 
profileID : "all" }/${topic}`, - }) + }), ); } diff --git a/packages/core/src/replicache/index.ts b/packages/core/src/replicache/index.ts index 31215eb6..7b85179a 100644 --- a/packages/core/src/replicache/index.ts +++ b/packages/core/src/replicache/index.ts @@ -6,12 +6,13 @@ import { S3Client, } from "@aws-sdk/client-s3"; import { Realtime } from "../realtime"; -import { Bucket } from "sst/node/bucket"; import { compress, decompress } from "../util/compress"; +import { Resource } from "sst"; +import { Websocket } from "../websocket"; export async function poke(profileID?: string) { console.log("sending poke"); - await Realtime.publish("poke", {}); + await Websocket.publish("poke", {}); console.log("poke sent"); } @@ -35,7 +36,7 @@ export module CVR { const result = await s3 .send( new GetObjectCommand({ - Bucket: Bucket.storage.bucketName, + Bucket: Resource.Storage.name, Key: await path, }), ) @@ -52,7 +53,7 @@ export module CVR { const path = await key(clientGroupID, version); await s3.send( new PutObjectCommand({ - Bucket: Bucket.storage.bucketName, + Bucket: Resource.Storage.name, Key: path, ContentEncoding: "gzip", ContentType: "application/json", diff --git a/packages/core/src/run/codebuild-runner.ts b/packages/core/src/run/codebuild-runner.ts index 06cb4496..43771fd3 100644 --- a/packages/core/src/run/codebuild-runner.ts +++ b/packages/core/src/run/codebuild-runner.ts @@ -34,10 +34,10 @@ export module CodebuildRunner { export const createResource = zod( z.object({ credentials: z.custom(), - awsAccountExternalID: z.string().nonempty(), - region: z.string().nonempty(), - suffix: z.string().nonempty(), - image: z.string().nonempty(), + awsAccountExternalID: z.string().min(1), + region: z.string().min(1), + suffix: z.string().min(1), + image: z.string().min(1), architecture: z.enum(Architecture), compute: z.enum(Compute), }), diff --git a/packages/core/src/run/config.ts b/packages/core/src/run/config.ts index f46c41fd..ff0d96f5 100644 --- 
a/packages/core/src/run/config.ts +++ b/packages/core/src/run/config.ts @@ -1,22 +1,23 @@ import { z } from "zod"; -import { minimatch } from "minimatch"; import { zod } from "../util/zod"; import { createTransactionEffect, useTransaction } from "../util/transaction"; import { Env, runConfigTable } from "./run.sql"; import { useWorkspace } from "../actor"; import { createId } from "@paralleldrive/cuid2"; import { and, eq } from "drizzle-orm"; -import { event } from "../event"; +import { createEvent } from "../event"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export module RunConfig { export const Events = { - Updated: event( + Updated: createEvent( "app.config.updated", z.object({ appID: z.string().cuid2(), stagePattern: z.string().min(1), awsAccountExternalID: z.string(), - }) + }), ), }; @@ -42,12 +43,12 @@ export module RunConfig { .where( and( eq(runConfigTable.workspaceID, useWorkspace()), - eq(runConfigTable.appID, appID) - ) + eq(runConfigTable.appID, appID), + ), ) .execute() - .then((rows) => rows) - ) + .then((rows) => rows), + ), ); export const put = zod( @@ -87,8 +88,8 @@ export module RunConfig { and( eq(runConfigTable.workspaceID, useWorkspace()), eq(runConfigTable.appID, input.appID), - eq(runConfigTable.stagePattern, input.stagePattern) - ) + eq(runConfigTable.stagePattern, input.stagePattern), + ), ) .then((rows) => rows[0]); if (!match) return; @@ -104,13 +105,13 @@ export module RunConfig { .execute(); }); await createTransactionEffect(() => - Events.Updated.publish({ + bus.publish(Resource.Bus, Events.Updated, { appID: input.appID, stagePattern: input.stagePattern, awsAccountExternalID: input.awsAccountExternalID, - }) + }), ); - } + }, ); export const remove = zod(z.string().cuid2(), (input) => @@ -120,10 +121,10 @@ export module RunConfig { .where( and( eq(runConfigTable.id, input), - eq(runConfigTable.workspaceID, useWorkspace()) - ) + eq(runConfigTable.workspaceID, useWorkspace()), + ), ) .execute(); - }) + }), ); 
} diff --git a/packages/core/src/run/index.ts b/packages/core/src/run/index.ts index 53e09d5f..71569c25 100644 --- a/packages/core/src/run/index.ts +++ b/packages/core/src/run/index.ts @@ -1,3 +1,4 @@ +import { Resource as SSTResource } from "sst"; import { createHash } from "crypto"; import { z } from "zod"; import { @@ -8,11 +9,13 @@ import { InvokeCommand, LambdaClient } from "@aws-sdk/client-lambda"; import { GetRoleCommand, IAMClient } from "@aws-sdk/client-iam"; import { EventBridgeClient, + DescribeRuleCommand, PutRuleCommand, PutTargetsCommand, + DeleteRuleCommand, + RemoveTargetsCommand, + ResourceNotFoundException, } from "@aws-sdk/client-eventbridge"; -import { Config } from "sst/node/config"; -import { Bucket } from "sst/node/bucket"; import { zod } from "../util/zod"; import { createTransaction, @@ -21,19 +24,10 @@ import { } from "../util/transaction"; import { useWorkspace, withActor } from "../actor"; import { createId } from "@paralleldrive/cuid2"; -import { - and, - eq, - getTableColumns, - gt, - inArray, - isNotNull, - isNull, -} from "../drizzle"; -import { event } from "../event"; +import { and, eq, getTableColumns, gt, inArray, isNull } from "../drizzle"; +import { createEvent } from "../event"; import { Log, - Resource, Trigger, runnerTable, runTable, @@ -47,20 +41,19 @@ import { import { App, Stage } from "../app"; import { RunConfig } from "./config"; import { RETRY_STRATEGY } from "../util/aws"; -import { Function } from "sst/node/function"; import { AWS, Credentials } from "../aws"; import { AppRepo } from "../app/repo"; import { Github } from "../git/github"; -import { LambdaRunner } from "./lambda-runner"; import { CodebuildRunner } from "./codebuild-runner"; import { Replicache } from "../replicache"; import { minimatch } from "minimatch"; -import { app, stage as stageTable } from "../app/app.sql"; +import { app, stage, stage as stageTable } from "../app/app.sql"; import { workspace } from "../workspace/workspace.sql"; import { Alert } 
from "../alert"; import { render } from "@jsx-email/render"; import { AutodeployEmail } from "@console/mail/emails/templates/AutodeployEmail"; import path from "path"; +import { bus } from "sst/aws/bus"; export { RunConfig } from "./config"; @@ -69,8 +62,6 @@ export module Run { const DEFAULT_ARCHITECTURE = "x86_64"; const DEFAULT_COMPUTE = "small"; const RUNNER_INACTIVE_TIME = 604800000; // 1 week - const RUNNER_WARMING_INTERVAL = 300000; // 5 minutes - const RUNNER_WARMING_INACTIVE_TIME = 86400000; // 1 day const ERROR_STATUS_MAP = (error: RunError | null) => { if (!error) return "succeeded"; switch (error.type) { @@ -138,50 +129,28 @@ export module Run { removeIfNotUsedAfter: number; }; - export type RunnerWarmerEvent = { + export type RunnerEvent = { + engine: string; + runID: string; workspaceID: string; - runnerID: string; + stage: string; + env: Record; + repo: { + cloneUrl: string; + path?: string; + }; + buildspec: { + version: string; + bucket: string; + }; + credentials: { + accessKeyId: string; + secretAccessKey: string; + sessionToken: string; + }; + trigger: Trigger; }; - export type RunnerEvent = - | { - warm: true; - repo: { - cloneUrl: string; - }; - buildspec: { - version: string; - bucket: string; - }; - credentials: { - accessKeyId: string; - secretAccessKey: string; - sessionToken: string; - }; - } - | { - warm: false; - engine: string; - runID: string; - workspaceID: string; - stage: string; - env: Record; - repo: { - cloneUrl: string; - path?: string; - }; - buildspec: { - version: string; - bucket: string; - }; - credentials: { - accessKeyId: string; - secretAccessKey: string; - sessionToken: string; - }; - trigger: Trigger; - }; - export type ConfigParserEvent = { content: string; trigger: Trigger; @@ -207,26 +176,26 @@ export module Run { export type SstConfigParseError = z.infer; export const Event = { - Created: event( + Created: createEvent( "run.created", z.object({ stageID: z.string().min(1), }) ), - CreateFailed: event( + 
CreateFailed: createEvent( "run.create-failed", z.object({ runID: z.string().min(1), }) ), - Completed: event( + Completed: createEvent( "run.completed", z.object({ runID: z.string().min(1), stageID: z.string().min(1), }) ), - RunnerStarted: event( + RunnerStarted: createEvent( "runner.started", z.object({ workspaceID: z.string().min(1), @@ -238,7 +207,7 @@ export module Run { timestamp: z.number().int(), }) ), - RunnerCompleted: event( + RunnerCompleted: createEvent( "runner.completed", z.object({ workspaceID: z.string().min(1), @@ -328,7 +297,7 @@ export module Run { const lambda = new LambdaClient({ retryStrategy: RETRY_STRATEGY }); const ret = await lambda.send( new InvokeCommand({ - FunctionName: Function.ConfigParser.functionName, + FunctionName: SSTResource.AutodeployConfig.configParserFunctionArn, InvocationType: "RequestResponse", Payload: JSON.stringify({ content: input.content, @@ -406,7 +375,7 @@ export module Run { }) .execute(); await createTransactionEffect(() => - Event.CreateFailed.publish({ runID }) + bus.publish(SSTResource.Bus, Event.CreateFailed, { runID }) ); }); @@ -494,12 +463,17 @@ export module Run { if (!stageID) { console.log("creating stage", { appID, stageID }); - stageID = await App.Stage.connect({ - name: stageName, - appID, - region, - awsAccountID: awsAccount.id, - }); + stageID = createId(); + await useTransaction((tx) => + tx.insert(stage).ignore().values({ + id: stageID, + name: stageName, + region, + awsAccountID: awsAccount.id, + workspaceID: useWorkspace(), + appID, + }) + ); } // Create Run @@ -517,7 +491,7 @@ export module Run { .execute(); await createTransactionEffect(() => - Event.Created.publish({ stageID }) + bus.publish(SSTResource.Bus, Event.Created, { stageID }) ); }); } @@ -649,20 +623,18 @@ export module Run { const cloneUrl = await Github.getCloneUrl(gitRepo); // Run runner - const Runner = useRunner(runner.engine); const timeoutInMinutes = timeoutToMinutes(run.config.target?.runner?.timeout) ?? 
- Runner.DEFAULT_BUILD_TIMEOUT_IN_MINUTES; - await Runner.invoke({ + CodebuildRunner.DEFAULT_BUILD_TIMEOUT_IN_MINUTES; + await CodebuildRunner.invoke({ credentials: awsConfig.credentials, region: runner.region, resource: runner.resource, payload: { - warm: false, engine: runner.engine, buildspec: { - version: Config.BUILDSPEC_VERSION, - bucket: Bucket.Buildspec.bucketName, + version: SSTResource.AutodeployConfig.buildspecVersion, + bucket: SSTResource.AutodeployConfig.buildspecBucketName, }, runID: run.id, workspaceID: useWorkspace(), @@ -717,15 +689,14 @@ export module Run { } // Schedule timeout monitor - const Runner = useRunner(runner.engine); const timeoutInMinutes = timeoutToMinutes(run.config.target?.runner?.timeout) ?? - Runner.DEFAULT_BUILD_TIMEOUT_IN_MINUTES; + CodebuildRunner.DEFAULT_BUILD_TIMEOUT_IN_MINUTES; const scheduler = new SchedulerClient({ retryStrategy: RETRY_STRATEGY }); await scheduler.send( new CreateScheduleCommand({ Name: `run-timeout-${run.id}`, - GroupName: process.env.RUN_TIMEOUT_MONITOR_SCHEDULE_GROUP_NAME!, + GroupName: SSTResource.AutodeployConfig.timeoutMonitorScheduleGroupName, FlexibleTimeWindow: { Mode: "OFF", }, @@ -735,8 +706,8 @@ export module Run { .split(".")[0] })`, Target: { - Arn: process.env.RUN_TIMEOUT_MONITOR_FUNCTION_ARN, - RoleArn: process.env.RUN_TIMEOUT_MONITOR_SCHEDULE_ROLE_ARN, + Arn: SSTResource.AutodeployConfig.timeoutMonitorFunctionArn, + RoleArn: SSTResource.AutodeployConfig.timeoutMonitorScheduleRoleArn, Input: JSON.stringify({ workspaceID: useWorkspace(), runID: run.id, @@ -745,9 +716,6 @@ export module Run { ActionAfterCompletion: "DELETE", }) ); - - // Schedule warmer if not scheduled - if (!runner.warmer) await scheduleRunnerWarmer(runner.id); }); export const complete = zod( @@ -771,6 +739,7 @@ export module Run { .then((x) => x[0]) ); if (!run) return; + if (run.timeCompleted) return; await createTransaction(async (tx) => { await tx @@ -796,7 +765,10 @@ export module Run { .execute(); await 
createTransactionEffect(() => - Event.Completed.publish({ runID, stageID: run.stageID! }) + bus.publish(SSTResource.Bus, Event.Completed, { + runID, + stageID: run.stageID!, + }) ); }); } @@ -817,20 +789,11 @@ export module Run { .update(runTable) .set({ timeStarted: new Date(), - log: - input.engine === "lambda" - ? { - engine: "lambda", - requestID: input.awsRequestId!, - logGroup: input.logGroup, - logStream: input.logStream, - timestamp: input.timestamp, - } - : { - engine: "codebuild", - logGroup: input.logGroup, - logStream: input.logStream, - }, + log: { + engine: "codebuild", + logGroup: input.logGroup, + logStream: input.logStream, + }, }) .where( and( @@ -859,77 +822,6 @@ export module Run { ); }); - export const getRunnerActiveUsage = zod( - z.string().cuid2(), - async (runnerID) => { - return await useTransaction((tx) => - tx - .select() - .from(runnerUsageTable) - .where( - and( - eq(runnerUsageTable.workspaceID, useWorkspace()), - eq(runnerUsageTable.id, runnerID), - gt( - runnerUsageTable.timeRun, - new Date(Date.now() - RUNNER_WARMING_INACTIVE_TIME) - ) - ) - ) - .execute() - ); - } - ); - - const useRunner = zod( - z.enum(Engine), - (engine) => - ({ - lambda: LambdaRunner, - codebuild: CodebuildRunner, - }[engine]) - ); - - export const setRunnerWarmer = zod( - z.object({ - runnerID: z.string().cuid2(), - warmer: z.string().min(1), - }), - async (input) => { - return await useTransaction((tx) => - tx - .update(runnerTable) - .set({ - warmer: input.warmer, - }) - .where( - and( - eq(runnerTable.workspaceID, useWorkspace()), - eq(runnerTable.id, input.runnerID) - ) - ) - .execute() - ); - } - ); - - export const unsetRunnerWarmer = zod(z.string().cuid2(), async (runnerID) => { - return await useTransaction((tx) => - tx - .update(runnerTable) - .set({ - warmer: null, - }) - .where( - and( - eq(runnerTable.workspaceID, useWorkspace()), - eq(runnerTable.id, runnerID) - ) - ) - .execute() - ); - }); - export const lookupRunner = zod( z.object({ region: 
z.string().min(1), @@ -939,12 +831,12 @@ export module Run { }), async (input) => { const engine = input.runnerConfig?.engine ?? DEFAULT_ENGINE; - const Runner = useRunner(engine); const architecture = input.runnerConfig?.architecture ?? DEFAULT_ARCHITECTURE; - const image = input.runnerConfig?.image ?? Runner.getImage(architecture); + const image = + input.runnerConfig?.image ?? CodebuildRunner.getImage(architecture); const compute = input.runnerConfig?.compute ?? DEFAULT_COMPUTE; - const type = `${engine}-${architecture}-${image}-${compute}`; + const type = `${engine}-${architecture}-${image}-${compute}.20241106`; return await useTransaction((tx) => tx .select() @@ -979,17 +871,19 @@ export module Run { const region = input.region; const credentials = input.credentials; const engine = input.runnerConfig?.engine ?? DEFAULT_ENGINE; - const Runner = useRunner(engine); const architecture = input.runnerConfig?.architecture ?? DEFAULT_ARCHITECTURE; - const image = input.runnerConfig?.image ?? Runner.getImage(architecture); + const image = + input.runnerConfig?.image ?? CodebuildRunner.getImage(architecture); const compute = input.runnerConfig?.compute ?? DEFAULT_COMPUTE; - const type = `${engine}-${architecture}-${image}-${compute}`; + const type = `${engine}-${architecture}-${image}-${compute}.20241106`; const runnerSuffix = architecture + "-" + createHash("sha256").update(type).digest("hex").substring(0, 8) + - (Config.STAGE !== "production" ? "-" + Config.STAGE : ""); + (SSTResource.App.stage !== "production" + ? 
"-" + SSTResource.App.stage + : ""); const runnerID = createId(); let resource; @@ -1011,7 +905,7 @@ export module Run { ); // Create resources - resource = await Runner.createResource({ + resource = await CodebuildRunner.createResource({ credentials, awsAccountExternalID, region, @@ -1021,50 +915,123 @@ export module Run { compute, }); - // Create event target in user account to forward external events + // Create bus target to forward two types of events to SST Console + // - "sst.runner" events: events fired from within the runner + // - "aws.codebuild" events: events fired by AWS CodeBuild + const suffix = + SSTResource.App.stage !== "production" + ? "-" + SSTResource.App.stage + : ""; + let roleArn: string | undefined; + const useRoleArn = async () => { + if (roleArn) return roleArn; + const iam = new IAMClient({ credentials }); + const roleRet = await iam.send( + new GetRoleCommand({ + RoleName: "SSTConsolePublisher" + suffix, + }) + ); + roleArn = roleRet.Role?.Arn!; + return roleArn; + }; const eb = new EventBridgeClient({ credentials, region, retryStrategy: RETRY_STRATEGY, }); - const suffix = Config.STAGE !== "production" ? "-" + Config.STAGE : ""; - const ruleName = "SSTConsoleExternal" + suffix; - try { + + // Create "sst.runner" forwarder + await (async () => { + const ruleName = "SSTConsoleExternal" + suffix; + const ruleSource = "sst.runner"; + const targetId = "SSTConsoleExternal"; + try { + const rule = await eb.send( + new DescribeRuleCommand({ Name: ruleName }) + ); + const eventPattern = JSON.parse(rule.EventPattern ?? 
"{}"); + if (eventPattern.source?.includes(ruleSource)) return; + await eb.send( + new RemoveTargetsCommand({ Rule: ruleName, Ids: [targetId] }) + ); + await eb.send(new DeleteRuleCommand({ Name: ruleName })); + } catch (e) { + if (!(e instanceof ResourceNotFoundException)) throw e; + } await eb.send( new PutRuleCommand({ Name: ruleName, State: "ENABLED", - EventPattern: JSON.stringify({ - source: ["sst.external"], - }), + EventPattern: JSON.stringify({ source: [ruleSource] }), }) ); - - const iam = new IAMClient({ credentials }); - const roleName = "SSTConsolePublisher" + suffix; - const roleRet = await iam.send( - new GetRoleCommand({ - RoleName: roleName, + await eb.send( + new PutTargetsCommand({ + Rule: ruleName, + Targets: [ + { + Arn: SSTResource.Bus.arn, + Id: targetId, + RoleArn: await useRoleArn(), + }, + ], + }) + ); + })(); + + // Create "aws.codebuild" forwarder + await (async () => { + const ruleName = "SSTConsoleCodebuild" + suffix; + const ruleSource = "aws.codebuild"; + const ruleDetailType = "CodeBuild Build State Change"; + const targetId = "SSTConsoleCodebuild"; + try { + const rule = await eb.send( + new DescribeRuleCommand({ Name: ruleName }) + ); + const eventPattern = JSON.parse(rule.EventPattern ?? 
"{}"); + if ( + eventPattern.source?.includes(ruleSource) && + eventPattern["detail-type"]?.includes(ruleDetailType) && + eventPattern.detail?.["build-status"]?.includes("FAILED") && + eventPattern.detail?.["build-status"]?.includes("FAULT") && + eventPattern.detail?.["build-status"]?.includes("STOPPED") && + eventPattern.detail?.["build-status"]?.includes("TIMED_OUT") + ) + return; + await eb.send( + new RemoveTargetsCommand({ Rule: ruleName, Ids: [targetId] }) + ); + await eb.send(new DeleteRuleCommand({ Name: ruleName })); + } catch (e) { + if (!(e instanceof ResourceNotFoundException)) throw e; + } + await eb.send( + new PutRuleCommand({ + Name: ruleName, + State: "ENABLED", + EventPattern: JSON.stringify({ + source: [ruleSource], + "detail-type": [ruleDetailType], + detail: { + "build-status": ["FAILED", "FAULT", "STOPPED", "TIMED_OUT"], + }, + }), }) ); - await eb.send( new PutTargetsCommand({ Rule: ruleName, Targets: [ { - Arn: process.env.EVENT_BUS_ARN, - Id: "SSTConsoleExternal", - RoleArn: roleRet.Role?.Arn!, + Arn: SSTResource.Bus.arn, + Id: targetId, + RoleArn: await useRoleArn(), }, ], }) ); - } catch (e: any) { - if (e.name !== "ResourceConflictException") { - throw e; - } - } + })(); // Store resource await useTransaction((tx) => @@ -1097,7 +1064,7 @@ export module Run { await scheduleRunnerRemover(runnerID); - return { id: runnerID, region, engine, resource, warmer: null }; + return { id: runnerID, region, engine, resource }; } ); @@ -1108,11 +1075,10 @@ export module Run { }), async (input) => { const { runner, credentials } = input; - const Runner = useRunner(runner.engine); // Remove resources if (runner.resource) { - await Runner.removeResource({ + await CodebuildRunner.removeResource({ credentials, region: runner.region, resource: runner.resource, @@ -1134,77 +1100,6 @@ export module Run { } ); - export const warmRunner = zod( - z.object({ - region: z.string().min(1), - engine: z.enum(Engine), - resource: Resource, - credentials: z.custom(), - 
cloneUrl: z.string().min(1), - instances: z.number().int(), - }), - async ({ region, engine, resource, credentials, cloneUrl, instances }) => { - const Runner = useRunner(engine); - await Promise.all( - Array(instances) - .fill(0) - .map((_) => - Runner.invoke({ - region, - resource, - credentials, - payload: { - warm: true, - buildspec: { - version: Config.BUILDSPEC_VERSION, - bucket: Bucket.Buildspec.bucketName, - }, - repo: { - cloneUrl, - }, - credentials, - }, - timeoutInMinutes: Runner.DEFAULT_BUILD_TIMEOUT_IN_MINUTES, - }) - ) - ); - } - ); - - export const scheduleRunnerWarmer = zod( - z.string().cuid2(), - async (runnerID) => { - const now = Date.now(); - const scheduler = new SchedulerClient({ - retryStrategy: RETRY_STRATEGY, - }); - const name = `runner-warmer-${runnerID}-${now}`; - await scheduler.send( - new CreateScheduleCommand({ - Name: name, - GroupName: process.env.RUNNER_WARMER_SCHEDULE_GROUP_NAME!, - FlexibleTimeWindow: { - Mode: "OFF", - }, - ScheduleExpression: `at(${ - new Date(now + RUNNER_WARMING_INTERVAL).toISOString().split(".")[0] - })`, - Target: { - Arn: process.env.RUNNER_WARMER_FUNCTION_ARN, - RoleArn: process.env.RUNNER_WARMER_SCHEDULE_ROLE_ARN, - Input: JSON.stringify({ - workspaceID: useWorkspace(), - runnerID, - } satisfies Run.RunnerWarmerEvent), - }, - ActionAfterCompletion: "DELETE", - }) - ); - - await setRunnerWarmer({ runnerID, warmer: name }); - } - ); - export const scheduleRunnerRemover = zod( z.string().cuid2(), async (runnerID) => { @@ -1218,7 +1113,9 @@ export module Run { return scheduler.send( new CreateScheduleCommand({ Name: `runner-remover-${runnerID}-${now}`, - GroupName: process.env.RUNNER_REMOVER_SCHEDULE_GROUP_NAME!, + GroupName: + SSTResource.AutodeployConfig.runnerRemoverScheduleGroupName ?? 
+ process.env.RUNNER_REMOVER_SCHEDULE_GROUP_NAME!, FlexibleTimeWindow: { Mode: "OFF", }, @@ -1228,8 +1125,12 @@ export module Run { .split(".")[0] })`, Target: { - Arn: process.env.RUNNER_REMOVER_FUNCTION_ARN, - RoleArn: process.env.RUNNER_REMOVER_SCHEDULE_ROLE_ARN, + Arn: + SSTResource.AutodeployConfig.runnerRemoverFunctionArn ?? + process.env.RUNNER_REMOVER_FUNCTION_ARN, + RoleArn: + SSTResource.AutodeployConfig.runnerRemoverScheduleRoleArn ?? + process.env.RUNNER_REMOVER_SCHEDULE_ROLE_ARN, Input: JSON.stringify({ workspaceID: useWorkspace(), runnerID, @@ -1379,10 +1280,10 @@ export module Run { }) ), plain: message, - replyToAddress: `alert+autodeploy@${process.env.EMAIL_DOMAIN}`, + replyToAddress: `alert+autodeploy@${SSTResource.Email.sender}`, fromAddress: `${[appName, stageName] .filter((name) => name) - .join("/")} via SST `, + .join("/")} via SST `, }); } } diff --git a/packages/core/src/run/lambda-runner.ts b/packages/core/src/run/lambda-runner.ts deleted file mode 100644 index e1057595..00000000 --- a/packages/core/src/run/lambda-runner.ts +++ /dev/null @@ -1,285 +0,0 @@ -import { z } from "zod"; -import { - CreateFunctionCommand, - DeleteFunctionCommand, - GetFunctionCommand, - InvokeCommand, - LambdaClient, - PutFunctionEventInvokeConfigCommand, -} from "@aws-sdk/client-lambda"; -import { - AttachRolePolicyCommand, - PutRolePolicyCommand, - CreateRoleCommand, - GetRoleCommand, - IAMClient, - DeleteRoleCommand, - DetachRolePolicyCommand, - DeleteRolePolicyCommand, -} from "@aws-sdk/client-iam"; -import { Config } from "sst/node/config"; -import { zod } from "../util/zod"; -import { Resource, Architecture, Compute } from "./run.sql"; -import { RETRY_STRATEGY } from "../util/aws"; -import { Credentials } from "../aws"; -import { Run } from "."; - -export module LambdaRunner { - export const DEFAULT_BUILD_TIMEOUT_IN_MINUTES = 15; // 15 minutes - - export const getImage = zod(z.enum(Architecture), (architecture) => - architecture === "x86_64" - ? 
`${Config.IMAGE_URI}:x86_64-1` - : `${Config.IMAGE_URI}:arm64-1` - ); - - export const createResource = zod( - z.object({ - credentials: z.custom(), - awsAccountExternalID: z.string().nonempty(), - region: z.string().nonempty(), - suffix: z.string().nonempty(), - image: z.string().nonempty(), - architecture: z.enum(Architecture), - compute: z.enum(Compute), - }), - async ({ - credentials, - region, - suffix, - image, - architecture, - }): Promise => { - const roleArn = await createIamRoleInUserAccount(); - const functionArn = await createFunctionInUserAccount(); - return { - engine: "lambda", - properties: { - role: roleArn, - function: functionArn, - }, - }; - - async function createIamRoleInUserAccount() { - const iam = new IAMClient({ - credentials, - retryStrategy: RETRY_STRATEGY, - }); - const roleName = `sst-runner-${region}-${suffix}`; - try { - const ret = await iam.send( - new CreateRoleCommand({ - RoleName: roleName, - AssumeRolePolicyDocument: JSON.stringify({ - Version: "2012-10-17", - Statement: [ - { - Effect: "Allow", - Principal: { - Service: "lambda.amazonaws.com", - }, - Action: "sts:AssumeRole", - }, - ], - }), - }) - ); - await iam.send( - new PutRolePolicyCommand({ - RoleName: roleName, - PolicyName: "eventbridge", - PolicyDocument: JSON.stringify({ - Version: "2012-10-17", - Statement: [ - { - Effect: "Allow", - Action: "events:PutEvents", - Resource: "*", - }, - ], - }), - }) - ); - await iam.send( - new AttachRolePolicyCommand({ - RoleName: roleName, - PolicyArn: - "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole", - }) - ); - return ret.Role?.Arn!; - } catch (e: any) { - if (e.name !== "EntityAlreadyExistsException") { - throw e; - } - - return await iam - .send( - new GetRoleCommand({ - RoleName: roleName, - }) - ) - .then((ret) => ret.Role?.Arn!); - } - } - - async function createFunctionInUserAccount() { - const lambda = new LambdaClient({ - credentials, - region, - retryStrategy: RETRY_STRATEGY, - }); - const 
functionName = `sst-runner-${suffix}`; - try { - const ret = await lambda.send( - new CreateFunctionCommand({ - FunctionName: functionName, - Role: roleArn, - Code: { ImageUri: image }, - Timeout: 900, - MemorySize: 10240, - EphemeralStorage: { - Size: 10240, - }, - PackageType: "Image", - Architectures: [architecture], - }) - ); - - await lambda.send( - new PutFunctionEventInvokeConfigCommand({ - FunctionName: ret.FunctionArn!, - MaximumRetryAttempts: 0, - MaximumEventAgeInSeconds: 3600, - }) - ); - } catch (e: any) { - if (e.name === "InvalidParameterValueException") - return createFunctionInUserAccount(); - else if (e.name === "ResourceConflictException") { - /* ignore */ - } else throw e; - } - - // Wait or function state is ACTIVE - while (true) { - const ret = await lambda.send( - new GetFunctionCommand({ - FunctionName: functionName, - }) - ); - - if (ret.Configuration?.State !== "Pending") { - return ret.Configuration?.FunctionArn!; - } - await new Promise((r) => setTimeout(r, 5000)); - } - } - } - ); - - export const removeResource = zod( - z.object({ - credentials: z.custom(), - region: z.string().nonempty(), - resource: z.custom(), - }), - async ({ region, resource, credentials }) => { - await removeIamRoleInUserAccount(); - await removeFunctionInUserAccount(); - - async function removeIamRoleInUserAccount() { - const roleArn = resource.properties.role; - if (!roleArn) return; - const roleName = roleArn.split("/").pop()!; - - const iam = new IAMClient({ - credentials, - retryStrategy: RETRY_STRATEGY, - }); - try { - await iam.send( - new DeleteRolePolicyCommand({ - RoleName: roleName, - PolicyName: "eventbridge", - }) - ); - } catch (e: any) { - console.error(e); - } - - try { - await iam.send( - new DetachRolePolicyCommand({ - RoleName: roleName, - PolicyArn: - "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole", - }) - ); - } catch (e: any) { - if (e.name !== "NoSuchEntityException") { - console.error(e); - } - } - - try { - await 
iam.send( - new DeleteRoleCommand({ - RoleName: roleName, - }) - ); - } catch (e: any) { - console.error(e); - } - } - - async function removeFunctionInUserAccount() { - if (resource.engine !== "lambda") return; - - const functionName = resource.properties.function; - if (!functionName) return; - - const lambda = new LambdaClient({ - credentials, - region, - retryStrategy: RETRY_STRATEGY, - }); - try { - const ret = await lambda.send( - new DeleteFunctionCommand({ - FunctionName: functionName, - }) - ); - } catch (e: any) { - console.error(e); - } - } - } - ); - - export const invoke = zod( - z.object({ - credentials: z.custom(), - region: z.string().nonempty(), - resource: z.custom(), - payload: z.custom(), - timeoutInMinutes: z.number().int(), - }), - async ({ credentials, region, resource, payload }) => { - if (resource.engine !== "lambda") return; - - const lambda = new LambdaClient({ - credentials, - region, - retryStrategy: RETRY_STRATEGY, - }); - await lambda.send( - new InvokeCommand({ - FunctionName: resource.properties.function, - InvocationType: "Event", - Payload: JSON.stringify(payload), - }) - ); - } - ); -} diff --git a/packages/core/src/run/run.sql.ts b/packages/core/src/run/run.sql.ts index 9d45cd7c..084cd750 100644 --- a/packages/core/src/run/run.sql.ts +++ b/packages/core/src/run/run.sql.ts @@ -16,13 +16,6 @@ import { workspaceIndexes } from "../workspace/workspace.sql"; import { awsAccount } from "../aws/aws.sql"; export const Resource = z.discriminatedUnion("engine", [ - z.object({ - engine: z.literal("lambda"), - properties: z.object({ - role: z.string().min(1), - function: z.string().min(1), - }), - }), z.object({ engine: z.literal("codebuild"), properties: z.object({ @@ -32,7 +25,7 @@ export const Resource = z.discriminatedUnion("engine", [ }), ]); export type Resource = z.infer; -export const Engine = ["lambda", "codebuild"] as const; +export const Engine = ["codebuild"] as const; export const Architecture = ["x86_64", "arm64"] as const; 
export const Compute = ["small", "medium", "large", "xlarge"] as const; type RunErrors = { @@ -59,13 +52,6 @@ export type RunError = { }[keyof RunErrors]; export const Log = z.discriminatedUnion("engine", [ - z.object({ - engine: z.literal("lambda"), - requestID: z.string().min(1), - logGroup: z.string().min(1), - logStream: z.string().min(1), - timestamp: z.number().int(), - }), z.object({ engine: z.literal("codebuild"), logGroup: z.string().min(1), diff --git a/packages/core/src/state/index.ts b/packages/core/src/state/index.ts index 5962a873..2356b6a7 100644 --- a/packages/core/src/state/index.ts +++ b/packages/core/src/state/index.ts @@ -16,39 +16,65 @@ import { } from "../util/transaction"; import { createId } from "@paralleldrive/cuid2"; import { useWorkspace } from "../actor"; -import { and, count, eq, inArray, isNull, sql } from "drizzle-orm"; -import { event } from "../event"; -import { StageCredentials } from "../app/stage"; +import { and, count, eq, inArray, notInArray, sql } from "drizzle-orm"; +import { createEvent } from "../event"; +import { Stage, StageCredentials } from "../app/stage"; import { S3Client, GetObjectCommand, ListObjectsV2Command, } from "@aws-sdk/client-s3"; import { RETRY_STRATEGY } from "../util/aws"; -import { AWS } from "../aws"; +import { AWS, Credentials } from "../aws"; import { Replicache } from "../replicache"; -import { useTransition } from "react"; -import { stage } from "../app/app.sql"; +import { app, stage } from "../app/app.sql"; +import { bus } from "sst/aws/bus"; +import { Resource as SSTResource } from "sst"; +import { map, pipe, unique } from "remeda"; +import { Enrichers } from "../app/resource"; +import { queue } from "../util/queue"; +import { Issue } from "../issue"; export module State { export const Event = { - LockCreated: event( + UpdateCreated: createEvent( + "state.update.created", + z.object({ stageID: z.string(), updateID: z.string() }), + ), + SnapshotCreated: createEvent( + "state.snapshot.created", + 
z.object({ stageID: z.string(), updateID: z.string() }), + ), + StateUpdated: createEvent( + "state.updated", + z.object({ stageID: z.string() }), + ), + /** @deprecated */ + LockCreated: createEvent( "state.lock.created", z.object({ stageID: z.string(), versionID: z.string().optional() }), ), - LockRemoved: event( + /** @deprecated */ + LockRemoved: createEvent( "state.lock.removed", z.object({ stageID: z.string(), versionID: z.string().optional() }), ), - SummaryCreated: event( + /** @deprecated */ + SummaryCreated: createEvent( "state.summary.created", z.object({ stageID: z.string(), updateID: z.string() }), ), - HistoryCreated: event( + /** @deprecated */ + HistoryCreated: createEvent( "state.history.created", - z.object({ stageID: z.string(), key: z.string() }), + z.object({ + stageID: z.string(), + key: z.string(), + initial: z.boolean().optional(), + }), ), - HistorySynced: event( + /** @deprecated */ + HistorySynced: createEvent( "state.history.synced", z.object({ stageID: z.string(), updateID: z.string() }), ), @@ -291,7 +317,6 @@ export module State { ); const eventInserts = [] as (typeof stateEventTable.$inferInsert)[]; - const resourceInserts = [] as (typeof stateResourceTable.$inferInsert)[]; const resourceDeletes = [] as string[]; const counts = {} as Record; console.log({ @@ -312,35 +337,28 @@ export module State { return "same"; })(); counts[action] = (counts[action] || 0) + 1; - resourceInserts.push({ - stageID: input.config.stageID, - updateID: updateID, - updateCreatedID: action === "created" ? updateID : undefined, - updateModifiedID: action === "updated" ? updateID : undefined, - id: createId(), - timeStateModified: resource.modified - ? new Date(resource.modified) - : null, - timeStateCreated: resource.created - ? 
new Date(resource.created) - : null, - workspaceID: useWorkspace(), - type: resource.type, - urn: resource.urn, - custom: resource.custom, - inputs: resource.inputs, - outputs: resource.outputs, - parent: resource.parent, - }); if (action !== "same") { eventInserts.push({ - ...resourceInserts.at(-1)!, + stageID: input.config.stageID, updateID: updateID, + id: createId(), + timeStateModified: resource.modified + ? new Date(resource.modified) + : null, + timeStateCreated: resource.created + ? new Date(resource.created) + : null, + workspaceID: useWorkspace(), + type: resource.type, + urn: resource.urn, + custom: resource.custom, + inputs: resource.inputs, + outputs: resource.outputs, + parent: resource.parent, action: action, }); } } - for (const urn of Object.keys(previousResources)) { const resource = previousResources[urn]; counts["deleted"] = (counts["deleted"] || 0) + 1; @@ -378,23 +396,6 @@ export module State { ); if (eventInserts.length) await tx.insert(stateEventTable).ignore().values(eventInserts); - if (resourceInserts.length) - await tx - .insert(stateResourceTable) - .values(resourceInserts) - .onDuplicateKeyUpdate({ - set: { - updateModifiedID: sql`COALESCE(VALUES(update_modified_id), update_modified_id)`, - updateCreatedID: sql`COALESCE(VALUES(update_created_id), update_created_id)`, - timeStateCreated: sql`VALUES(time_state_created)`, - timeStateModified: sql`VALUES(time_state_modified)`, - type: sql`VALUES(type)`, - custom: sql`VALUES(custom)`, - inputs: sql`VALUES(inputs)`, - outputs: sql`VALUES(outputs)`, - parent: sql`VALUES(parent)`, - }, - }); if (resourceDeletes.length) await tx .delete(stateResourceTable) @@ -421,7 +422,7 @@ export module State { ), ); await createTransactionEffect(() => - Event.HistorySynced.publish({ + bus.publish(SSTResource.Bus, Event.HistorySynced, { stageID: input.config.stageID, updateID: updateID, }), @@ -464,7 +465,9 @@ export module State { runID?: string; command: string; created: string; + ignore: boolean; }; + if 
(lock.ignore) return; if (!lock.updateID) return; if (!lock.command) return; if (!lock.created) return; @@ -544,11 +547,7 @@ export module State { if (!obj) return; const summary = JSON.parse(await obj.Body!.transformToString()) as { version: string; - updateID: string; - resourceUpdated: number; - resourceCreated: number; - resourceDeleted: number; - resourceSame: number; + command?: UpdateCommand; timeStarted: string; timeCompleted: string; errors: { @@ -560,12 +559,9 @@ export module State { await tx .update(stateUpdateTable) .set({ - // resourceUpdated: summary.resourceUpdated, - // resourceCreated: summary.resourceCreated, - // resourceDeleted: summary.resourceDeleted, - // resourceSame: summary.resourceSame, errors: summary.errors, timeCompleted: new Date(summary.timeCompleted), + command: summary.command, }) .where( and( @@ -588,4 +584,657 @@ export module State { }); }, ); + + export const receiveUpdate = zod( + z.object({ + updateID: z.string(), + config: z.custom(), + }), + async (input) => { + console.log("receive update", input.updateID); + const s3 = new S3Client({ + ...input.config, + retryStrategy: RETRY_STRATEGY, + }); + const bootstrap = await AWS.Account.bootstrapIon(input.config); + if (!bootstrap) return; + const obj = await s3 + .send( + new GetObjectCommand({ + Bucket: bootstrap.bucket, + Key: + [ + "update", + input.config.app, + input.config.stage, + input.updateID, + ].join("/") + ".json", + }), + ) + .catch(() => {}); + if (!obj) return; + const update = JSON.parse(await obj.Body!.transformToString()) as { + version: string; + command: UpdateCommand; + timeStarted: string; + timeCompleted?: string; + errors: { + urn: string; + message: string; + }[]; + }; + console.log("update", update); + await createTransaction(async (tx) => { + const max = await tx + .select({ + count: count(), + }) + .from(stateUpdateTable) + .where( + and( + eq(stateUpdateTable.workspaceID, useWorkspace()), + eq(stateUpdateTable.stageID, input.config.stageID), + ), + 
) + .then((result) => result[0]?.count || 0); + await tx + .insert(stateUpdateTable) + .values({ + id: input.updateID, + index: max + 1, + errors: update.errors, + stageID: input.config.stageID, + workspaceID: useWorkspace(), + timeStarted: new Date(update.timeStarted), + timeCompleted: update.timeCompleted + ? new Date(update.timeCompleted) + : null, + command: update.command, + }) + .onDuplicateKeyUpdate({ + set: { + errors: update.errors, + timeStarted: new Date(update.timeStarted), + timeCompleted: update.timeCompleted + ? new Date(update.timeCompleted) + : null, + command: update.command, + }, + }); + await tx + .update(stage) + .set({ + timeUpdated: sql`CURRENT_TIMESTAMP(6)`, + }) + .where( + and( + eq(stage.workspaceID, useWorkspace()), + eq(stage.id, input.config.stageID), + ), + ); + await createTransactionEffect(() => Replicache.poke()); + }); + }, + ); + + export const receiveSnapshot = zod( + z.object({ + updateID: z.string(), + config: z.custom(), + }), + async (input) => { + const existing = await useTransaction((tx) => + tx + .select() + .from(stateUpdateTable) + .where( + and( + eq(stateUpdateTable.workspaceID, useWorkspace()), + eq(stateUpdateTable.id, input.updateID), + ), + ) + .then((result) => result.at(0)), + ); + if (!existing) { + console.log("update not found", { updateID: input.updateID }); + return; + } + const s3 = new S3Client({ + ...input.config, + retryStrategy: RETRY_STRATEGY, + }); + const bootstrap = await AWS.Account.bootstrapIon(input.config); + if (!bootstrap) return; + const key = `snapshot/${input.config.app}/${input.config.stage}/${input.updateID}.json`; + console.log("processing", key); + const state = await s3 + .send( + new GetObjectCommand({ + Bucket: bootstrap.bucket, + Key: key, + }), + ) + .then( + async (result) => + JSON.parse(await result.Body!.transformToString()).checkpoint + .latest || {}, + ) + .catch(() => {}); + if (!state) return; + if (!state.resources) state.resources = []; + let continueToken: string | 
undefined; + let previousKey = await s3 + .send( + new ListObjectsV2Command({ + Bucket: bootstrap.bucket, + Prefix: `snapshot/${input.config.app}/${input.config.stage}/`, + StartAfter: key, + ContinuationToken: continueToken, + }), + ) + .then((result) => result.Contents?.[0]?.Key); + // migrate from old history + if (!previousKey) { + previousKey = await s3 + .send( + new ListObjectsV2Command({ + Bucket: bootstrap.bucket, + Prefix: `history/${input.config.app}/${input.config.stage}/`, + ContinuationToken: continueToken, + MaxKeys: 1, + }), + ) + .then((result) => result.Contents?.[0]?.Key); + } + let previousState = { + resources: [], + }; + if (previousKey) { + previousState = await s3 + .send( + new GetObjectCommand({ + Bucket: bootstrap.bucket, + Key: previousKey, + }), + ) + .then( + async (result) => + JSON.parse(await result.Body!.transformToString()).checkpoint + .latest, + ) + .catch(() => ({})); + console.log("found previous", previousKey); + } + if (!previousState) + previousState = { + resources: [], + }; + if (!previousState.resources) previousState.resources = []; + + const resources = Object.fromEntries( + state.resources.map((r: any) => [r.urn, r]), + ); + const previousResources = Object.fromEntries( + previousState.resources.map((r: any) => [r.urn, r]), + ); + + const eventInserts = [] as (typeof stateEventTable.$inferInsert)[]; + const resourceDeletes = [] as string[]; + const counts = {} as Record; + console.log({ + stage: input.config.stageID, + update: input.updateID, + }); + for (const [urn, resource] of Object.entries(resources)) { + const previous = previousResources[urn]; + delete previousResources[urn]; + resource.inputs = resource.inputs || {}; + resource.outputs = resource.outputs || {}; + delete resource.inputs["__provider"]; + delete resource.outputs["__provider"]; + const action = (() => { + if (!previous) return "created"; + if (previous.created !== resource.created) return "created"; + if (previous.modified !== resource.modified) 
return "updated"; + return "same"; + })(); + counts[action] = (counts[action] || 0) + 1; + if (action !== "same") { + eventInserts.push({ + stageID: input.config.stageID, + updateID: input.updateID, + id: createId(), + timeStateModified: resource.modified + ? new Date(resource.modified) + : null, + timeStateCreated: resource.created + ? new Date(resource.created) + : null, + workspaceID: useWorkspace(), + type: resource.type, + urn: resource.urn, + custom: resource.custom, + inputs: resource.inputs, + outputs: resource.outputs, + parent: resource.parent, + action: action, + }); + } + } + + for (const urn of Object.keys(previousResources)) { + const resource = previousResources[urn]; + counts["deleted"] = (counts["deleted"] || 0) + 1; + eventInserts.push({ + stageID: input.config.stageID, + updateID: input.updateID, + action: "deleted", + id: createId(), + workspaceID: useWorkspace(), + type: resource.type, + urn: resource.urn, + custom: resource.custom, + inputs: {}, + outputs: {}, + parent: resource.parent, + }); + resourceDeletes.push(resource.urn); + } + await createTransaction( + async (tx) => { + await createTransactionEffect(() => Replicache.poke()); + await tx + .update(stateUpdateTable) + .set({ + resourceSame: counts.same || 0, + resourceCreated: counts.created || 0, + resourceUpdated: counts.updated || 0, + resourceDeleted: counts.deleted || 0, + }) + .where( + and( + eq(stateUpdateTable.workspaceID, useWorkspace()), + eq(stateUpdateTable.id, input.updateID), + ), + ); + if (eventInserts.length) + await tx.insert(stateEventTable).ignore().values(eventInserts); + if (resourceDeletes.length) + await tx + .delete(stateResourceTable) + .where( + and( + eq(stateResourceTable.workspaceID, useWorkspace()), + eq(stateResourceTable.stageID, input.config.stageID), + inArray(stateResourceTable.urn, resourceDeletes), + ), + ); + await tx + .update(stage) + .set({ + timeUpdated: sql`CURRENT_TIMESTAMP(6)`, + timeDeleted: + existing.command === "remove" && 
state.resources.length === 0 + ? sql`CURRENT_TIMESTAMP(6)` + : null, + }) + .where( + and( + eq(stage.workspaceID, useWorkspace()), + eq(stage.id, input.config.stageID), + ), + ); + }, + { + isolationLevel: "read uncommitted", + }, + ); + }, + ); + + export const refreshState = zod( + z.object({ + config: z.custom(), + }), + async (input) => { + const s3 = new S3Client({ + ...input.config, + retryStrategy: RETRY_STRATEGY, + }); + const resourceInserts = [] as (typeof stateResourceTable.$inferInsert)[]; + const workspaceID = useWorkspace(); + + const v3bootstrap = await AWS.Account.bootstrapIon(input.config); + if (v3bootstrap) { + const key = `app/${input.config.app}/${input.config.stage}.json`; + console.log("looking for v3", key); + const state = await s3 + .send( + new GetObjectCommand({ + Bucket: v3bootstrap.bucket, + Key: key, + }), + ) + .then( + async (result) => + JSON.parse(await result.Body!.transformToString()).checkpoint + .latest || {}, + ) + .catch(() => {}); + for (const resource of state?.resources || []) { + resource.inputs = resource.inputs || {}; + resource.outputs = resource.outputs || {}; + delete resource.inputs["__provider"]; + delete resource.outputs["__provider"]; + resourceInserts.push({ + stageID: input.config.stageID, + updateID: "", + id: createId(), + timeStateModified: resource.modified + ? new Date(resource.modified) + : null, + timeStateCreated: resource.created + ? 
new Date(resource.created) + : null, + workspaceID, + type: resource.type, + urn: resource.urn, + custom: resource.custom, + inputs: resource.inputs, + outputs: resource.outputs, + parent: resource.parent, + }); + } + } + + const v2bootstrap = await AWS.Account.bootstrap(input.config); + if (v2bootstrap) { + console.log("looking for v2"); + const list = await s3 + .send( + new ListObjectsV2Command({ + Prefix: `stackMetadata/app.${input.config.app}/stage.${input.config.stage}/`, + Bucket: v2bootstrap.bucket, + }), + ) + .catch(() => {}); + if (list && list.Contents?.length) { + console.log("found", list.Contents?.length, "stacks"); + for (const obj of list.Contents || []) { + console.log("processing", obj.Key); + const stackID = obj.Key?.split("/").pop()!.split(".")[1]; + const result = await s3 + .send( + new GetObjectCommand({ + Key: obj.Key!, + Bucket: v2bootstrap.bucket, + }), + ) + .catch((err) => { + if (err.name === "AccessDenied") return; + if (err.name === "NoSuchBucket") return; + if (err.name === "NoSuchKey") return; + throw err; + }); + if (!result) continue; + const body = await result + .Body!.transformToString() + .then((x) => JSON.parse(x)); + const r = []; + body.push({ + type: "Stack", + id: stackID, + addr: stackID, + data: {}, + }); + for (let res of body) { + const enrichment = + res.type in Enrichers + ? 
await Enrichers[res.type as keyof typeof Enrichers]( + res, + input.config.credentials, + input.config.region, + ).catch(() => ({})) + : {}; + r.push({ + ...res, + stackID, + enrichment, + }); + const type = `sstv2:aws:${res.type}`; + const urn = `urn:pulumi:${input.config.stage}::${input.config.app}::${stackID}$${type}::${res.id}`; + resourceInserts.push({ + workspaceID, + type, + urn, + id: createId(), + custom: true, + inputs: { + addr: res.addr, + stackID: stackID, + }, + outputs: { + ...res.data, + enrichment, + }, + stageID: input.config.stageID, + updateID: "", + }); + } + } + } + } + + await createTransaction( + async (tx) => { + if (resourceInserts.length) + await tx + .insert(stateResourceTable) + .values(resourceInserts) + .onDuplicateKeyUpdate({ + set: { + updateModifiedID: sql`COALESCE(VALUES(update_modified_id), update_modified_id)`, + updateCreatedID: sql`COALESCE(VALUES(update_created_id), update_created_id)`, + timeStateCreated: sql`VALUES(time_state_created)`, + timeStateModified: sql`VALUES(time_state_modified)`, + type: sql`VALUES(type)`, + custom: sql`VALUES(custom)`, + inputs: sql`VALUES(inputs)`, + outputs: sql`VALUES(outputs)`, + parent: sql`VALUES(parent)`, + }, + }); + await tx.delete(stateResourceTable).where( + and( + eq(stateResourceTable.workspaceID, useWorkspace()), + eq(stateResourceTable.stageID, input.config.stageID), + resourceInserts.length + ? 
notInArray( + stateResourceTable.urn, + resourceInserts.map((i) => i.urn), + ) + : undefined, + ), + ); + if (!resourceInserts.length) { + await Stage.remove(input.config.stageID); + } + if (resourceInserts.length) { + await tx + .update(stage) + .set({ + timeDeleted: null, + }) + .where( + and( + eq(stage.id, input.config.stageID), + eq(stage.workspaceID, workspaceID), + ), + ); + } + await createTransactionEffect(() => Issue.subscribeIon(input.config)); + await createTransactionEffect(() => Replicache.poke()); + }, + { + isolationLevel: "read uncommitted", + }, + ); + }, + ); + + export const scan = zod( + z.object({ + credentials: z.custom(), + awsAccountID: z.string().cuid2(), + region: z.string(), + }), + async (input) => { + console.log("scanning", input.awsAccountID, input.region); + const stages = [] as { + app: string; + stage: string; + version: "v2" | "v3"; + }[]; + + const s3 = new S3Client({ + credentials: input.credentials, + retryStrategy: RETRY_STRATEGY, + region: input.region, + }); + const v2bootstrap = await AWS.Account.bootstrap(input); + if (v2bootstrap) { + console.log("scanning v2"); + let token: string | undefined; + while (true) { + const list = await s3 + .send( + new ListObjectsV2Command({ + Prefix: "stackMetadata", + Bucket: v2bootstrap.bucket, + ContinuationToken: token, + }), + ) + .catch(() => {}); + if (!list) break; + for (const obj of list.Contents || []) { + const [, appHint, stageHint] = obj.Key!.split("/"); + if (!appHint || !stageHint) continue; + const [, stageName] = stageHint.split("."); + const [, appName] = appHint.split("."); + if (!stageName || !appName) continue; + stages.push({ + app: appName, + stage: stageName, + version: "v2", + }); + } + if (!list.ContinuationToken) break; + token = list.ContinuationToken; + } + } + const v3bootstrap = await AWS.Account.bootstrapIon(input); + if (v3bootstrap) { + console.log("scanning v3"); + let token: string | undefined; + while (true) { + const list = await s3 + .send( + new 
ListObjectsV2Command({ + Prefix: "app/", + Bucket: v3bootstrap.bucket, + ContinuationToken: token, + }), + ) + .catch((err) => { + console.error(err); + }); + if (!list) break; + for (const obj of list.Contents || []) { + const splits = obj.Key!.split("/"); + const appName = splits.at(-2); + const stageName = splits.at(-1)?.split(".").at(0); + if (!appName || !stageName) continue; + stages.push({ + app: appName, + stage: stageName, + version: "v3", + }); + } + if (!list.ContinuationToken) break; + token = list.ContinuationToken; + } + } + const apps = pipe( + stages, + map((x) => x.app), + unique(), + ); + const workspaceID = useWorkspace(); + if (!apps.length) return; + const toResync = await useTransaction(async (tx) => { + await tx + .insert(app) + .values( + apps.map((app) => ({ + id: createId(), + name: app, + workspaceID, + })), + ) + .onDuplicateKeyUpdate({ + set: { + timeDeleted: null, + }, + }); + const allApps = await tx + .select({ id: app.id, name: app.name }) + .from(app) + .where(eq(app.workspaceID, workspaceID)) + .execute() + .then((rows) => new Map(rows.map((row) => [row.name, row.id]))); + await tx + .insert(stage) + .ignore() + .values( + stages.map((item) => ({ + id: createId(), + appID: allApps.get(item.app)!, + workspaceID, + name: item.stage, + region: input.region, + awsAccountID: input.awsAccountID, + })), + ); + const allStages = await tx + .select({ id: stage.id }) + .from(stage) + .where( + and( + eq(stage.workspaceID, workspaceID), + eq(stage.awsAccountID, input.awsAccountID), + eq(stage.region, input.region), + ), + ) + .execute(); + return allStages; + }); + await queue(5, toResync, async (item) => { + let retries = 0; + while (true) { + try { + const config = await Stage.assumeRole(item.id); + if (!config) return; + await State.refreshState({ + config, + }); + break; + } catch (ex) { + console.log("failed to refresh state for " + item.id, ex); + retries++; + if (retries > 3) break; + } + } + }); + }, + ); } diff --git 
a/packages/core/src/state/state.sql.ts b/packages/core/src/state/state.sql.ts index f31e2494..aa50d219 100644 --- a/packages/core/src/state/state.sql.ts +++ b/packages/core/src/state/state.sql.ts @@ -23,6 +23,8 @@ export const UpdateCommand = z.union([ z.literal("edit"), ]); +export type UpdateCommand = z.infer; + export const Command = ["deploy", "refresh", "remove", "edit"] as const; export const stateUpdateTable = mysqlTable( @@ -56,7 +58,7 @@ export const stateUpdateTable = mysqlTable( foreignColumns: [runTable.workspaceID, runTable.id], }).onDelete("cascade"), // index: unique("index").on(table.workspaceID, table.stageID, table.index), - }) + }), ); export const Action = ["created", "updated", "deleted"] as const; @@ -94,14 +96,14 @@ export const stateEventTable = mysqlTable( table.workspaceID, table.stageID, table.updateID, - table.urn + table.urn, ), updateID: foreignKey({ name: "state_event_update_id", columns: [table.workspaceID, table.updateID], foreignColumns: [stateUpdateTable.workspaceID, stateUpdateTable.id], }).onDelete("cascade"), - }) + }), ); export const stateResourceTable = mysqlTable( @@ -140,5 +142,5 @@ export const stateResourceTable = mysqlTable( columns: [table.workspaceID, table.updateModifiedID], foreignColumns: [stateUpdateTable.workspaceID, stateUpdateTable.id], }).onDelete("cascade"), - }) + }), ); diff --git a/packages/core/src/storage/index.ts b/packages/core/src/storage/index.ts index 86ff9e23..9cee1c96 100644 --- a/packages/core/src/storage/index.ts +++ b/packages/core/src/storage/index.ts @@ -8,8 +8,8 @@ import { } from "@aws-sdk/client-s3"; import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; import { createId } from "@paralleldrive/cuid2"; -import { Bucket } from "sst/node/bucket"; import { compress } from "../util/compress"; +import { Resource } from "sst"; const s3 = new S3Client({}); export async function putEphemeral( @@ -17,26 +17,26 @@ export async function putEphemeral( options?: Omit< PutObjectCommandInput, 
"Body" | "Key" | "Bucket" | "ContentEncoding" - > + >, ) { const key = `ephemeral/${createId()}`; await s3.send( new PutObjectCommand({ Key: key, - Bucket: Bucket.storage.bucketName, + Bucket: Resource.Storage.name, ContentEncoding: "gzip", Body: await compress(body), ...options, - }) + }), ); const url = await getSignedUrl( s3, new GetObjectCommand({ - Bucket: Bucket.storage.bucketName, + Bucket: Resource.Storage.name, Key: key, - }) + }), ); return url; diff --git a/packages/core/src/stripe/index.ts b/packages/core/src/stripe/index.ts index 63dfd995..2bd54631 100644 --- a/packages/core/src/stripe/index.ts +++ b/packages/core/src/stripe/index.ts @@ -1,6 +1,7 @@ +import { Resource } from "sst"; import { Stripe } from "stripe"; -import { Config } from "sst/node/config"; -export const stripe = new Stripe(Config.STRIPE_SECRET_KEY, { +// TODO: add stripe secret +export const stripe = new Stripe("", { apiVersion: "2024-06-20", }); diff --git a/packages/core/src/user/index.ts b/packages/core/src/user/index.ts index e763dcc4..04d6d446 100644 --- a/packages/core/src/user/index.ts +++ b/packages/core/src/user/index.ts @@ -9,23 +9,25 @@ import { and, eq, sql } from "drizzle-orm"; import { createTransactionEffect, useTransaction } from "../util/transaction"; import { user } from "./user.sql"; import { useActor, useWorkspace } from "../actor"; -import { event } from "../event"; +import { createEvent } from "../event"; import { render } from "@jsx-email/render"; import { InviteEmail } from "@console/mail/emails/templates/InviteEmail"; import { SESv2Client, SendEmailCommand } from "@aws-sdk/client-sesv2"; import { workspace } from "../workspace/workspace.sql"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; const ses = new SESv2Client({}); export const Info = createSelectSchema(user, { id: (schema) => schema.id.cuid2(), - email: (schema) => schema.email.trim().toLowerCase().nonempty(), + email: (schema) => schema.email.trim().toLowerCase().min(1), 
workspaceID: (schema) => schema.workspaceID.cuid2(), }); export type Info = z.infer; export const Events = { - UserCreated: event( + UserCreated: createEvent( "user.created", z.object({ userID: z.string().cuid2(), @@ -80,7 +82,7 @@ export const create = zod( ) .then((rows) => rows[0]!.id); await createTransactionEffect(() => - Events.UserCreated.publish({ userID: id }), + bus.publish(Resource.Bus, Events.UserCreated, { userID: id }), ); return id; }), diff --git a/packages/core/src/util/transaction.ts b/packages/core/src/util/transaction.ts index 1ad277f1..8f6899c6 100644 --- a/packages/core/src/util/transaction.ts +++ b/packages/core/src/util/transaction.ts @@ -6,9 +6,9 @@ import { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT, } from "drizzle-orm/planetscale-serverless"; -import { Context } from "sst/context/context2.js"; import { db } from "../drizzle"; import { ExtractTablesWithRelations } from "drizzle-orm"; +import { createContext } from "../context"; export type Transaction = MySqlTransaction< PlanetscaleQueryResultHKT, @@ -19,7 +19,7 @@ export type Transaction = MySqlTransaction< export type TxOrDb = Transaction | typeof db; -const TransactionContext = Context.create<{ +const TransactionContext = createContext<{ tx: TxOrDb; effects: (() => void | Promise)[]; }>("TransactionContext"); diff --git a/packages/core/src/websocket/index.ts b/packages/core/src/websocket/index.ts new file mode 100644 index 00000000..e2c0777e --- /dev/null +++ b/packages/core/src/websocket/index.ts @@ -0,0 +1,27 @@ +import { Resource } from "sst"; +import { useWorkspace } from "../actor"; + +export namespace Websocket { + export async function publish(type: string, properties: Record) { + const event = { + type, + properties: { + ...properties, + workspaceID: useWorkspace(), + }, + }; + const channel = `/workspace/${event.properties.workspaceID}`; + const body = JSON.stringify({ + channel, + events: [JSON.stringify(event)], + }); + await 
fetch("https://" + Resource.Websocket.http + "/event", { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-api-key": Resource.Websocket.token, + }, + body, + }); + } +} diff --git a/packages/core/src/workspace/index.ts b/packages/core/src/workspace/index.ts index 6c0c9e6c..684c3315 100644 --- a/packages/core/src/workspace/index.ts +++ b/packages/core/src/workspace/index.ts @@ -7,17 +7,19 @@ import { zod } from "../util/zod"; import { createId } from "@paralleldrive/cuid2"; import { and, eq, sql } from "drizzle-orm"; import { createTransactionEffect, useTransaction } from "../util/transaction"; -import { event } from "../event"; +import { createEvent } from "../event"; import { VisibleError } from "../util/error"; import { assertActor } from "../actor"; import { user } from "../user/user.sql"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export const Events = { - Created: event( + Created: createEvent( "workspace.created", z.object({ workspaceID: z.string().nonempty(), - }) + }), ), }; @@ -25,7 +27,7 @@ export class WorkspaceExistsError extends VisibleError { constructor(slug: string) { super( "workspace.slug_exists", - `there is already a workspace named "${slug}"` + `there is already a workspace named "${slug}"`, ); } } @@ -60,12 +62,12 @@ export const create = zod( }); if (!result.rowsAffected) throw new WorkspaceExistsError(input.slug); await createTransactionEffect(() => - Events.Created.publish({ + bus.publish(Resource.Bus, Events.Created, { workspaceID: id, - }) + }), ); return id; - }) + }), ); export const remove = zod(Info.shape.id, (input) => @@ -79,8 +81,8 @@ export const remove = zod(Info.shape.id, (input) => .where( and( eq(user.workspaceID, input), - eq(user.email, account.properties.email) - ) + eq(user.email, account.properties.email), + ), ) .execute() .then((rows) => rows.at(0)); @@ -91,7 +93,7 @@ export const remove = zod(Info.shape.id, (input) => timeDeleted: sql`now()`, }) 
.where(eq(workspace.id, row.workspaceID)); - }) + }), ); export const list = zod(z.void(), () => @@ -100,8 +102,8 @@ export const list = zod(z.void(), () => .select() .from(workspace) .execute() - .then((rows) => rows) - ) + .then((rows) => rows), + ), ); export const fromID = zod(z.string().min(1), async (id) => @@ -112,5 +114,5 @@ export const fromID = zod(z.string().min(1), async (id) => .where(eq(workspace.id, id)) .execute() .then((rows) => rows[0]); - }) + }), ); diff --git a/packages/core/sst-env.d.ts b/packages/core/sst-env.d.ts index a9187e85..b0900f6b 100644 --- a/packages/core/sst-env.d.ts +++ b/packages/core/sst-env.d.ts @@ -1 +1,144 @@ -/// +/* This file is auto-generated by SST. Do not edit. */ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +import "sst" +export {} +declare module "sst" { + export interface Resource { + "Alerts": { + "arn": string + "type": "sst.aws.SnsTopic" + } + "Api": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "ApiRouter": { + "type": "sst.aws.Router" + "url": string + } + "Auth": { + "publicKey": string + "type": "sst.aws.Auth" + } + "AuthAuthenticator": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "AuthRouter": { + "type": "sst.aws.Router" + "url": string + } + "AutodeployConfig": { + "buildImage": string + "buildspecBucketName": string + "buildspecVersion": string + "configParserFunctionArn": string + "runnerRemoverFunctionArn": string + "runnerRemoverScheduleGroupName": string + "runnerRemoverScheduleRoleArn": string + "timeoutMonitorFunctionArn": string + "timeoutMonitorScheduleGroupName": string + "timeoutMonitorScheduleRoleArn": string + "type": "sst.sst.Linkable" + } + "AutodeployConfigParser": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployRunnerRemover": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployTimeoutMonitor": { + "name": string + "type": "sst.aws.Function" + } + "BotpoisonSecretKey": { + "type": 
"sst.sst.Secret" + "value": string + } + "Bus": { + "arn": string + "name": string + "type": "sst.aws.Bus" + } + "Connect": { + "name": string + "type": "sst.aws.Function" + } + "Database": { + "database": string + "host": string + "password": string + "port": number + "type": "sst.sst.Linkable" + "username": string + } + "Email": { + "configSet": string + "sender": string + "type": "sst.aws.Email" + } + "Error": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "GithubAppID": { + "type": "sst.sst.Secret" + "value": string + } + "GithubPrivateKey": { + "type": "sst.sst.Secret" + "value": string + } + "GithubWebhookSecret": { + "type": "sst.sst.Secret" + "value": string + } + "IssueDestination": { + "prefix": string + "role": string + "stream": string + "type": "sst.sst.Linkable" + } + "IssueStream": { + "name": string + "type": "sst.aws.KinesisStream" + } + "SlackClientID": { + "type": "sst.sst.Secret" + "value": string + } + "SlackClientSecret": { + "type": "sst.sst.Secret" + "value": string + } + "Storage": { + "name": string + "type": "sst.aws.Bucket" + } + "Websocket": { + "http": string + "realtime": string + "token": string + "type": "sst.sst.Linkable" + } + "WebsocketAuthorizer": { + "name": string + "type": "sst.aws.Function" + } + "WebsocketToken": { + "type": "sst.sst.Secret" + "value": string + } + "Workspace": { + "type": "sst.aws.StaticSite" + "url": string + } + } +} diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json index b34bf329..b738570c 100644 --- a/packages/core/tsconfig.json +++ b/packages/core/tsconfig.json @@ -1,10 +1,10 @@ { - "extends": "@tsconfig/node16/tsconfig.json", + "extends": "@tsconfig/node20/tsconfig.json", "compilerOptions": { "strict": true, "jsx": "react", "module": "esnext", - "moduleResolution": "node", + "moduleResolution": "bundler", "noUncheckedIndexedAccess": true } } diff --git a/packages/functions/package.json b/packages/functions/package.json index a251d54d..34895ae7 100644 --- 
a/packages/functions/package.json +++ b/packages/functions/package.json @@ -11,7 +11,7 @@ "@types/aws-lambda": "^8.10.114", "@types/luxon": "^3.3.1", "@types/node": "^18.16.0", - "sst": "2.43.4", + "sst": "3.3.2", "typescript": "^5.2.2" }, "dependencies": { @@ -27,11 +27,13 @@ "@botpoison/node": "^0.1.10", "@console/mail": "workspace:*", "@esbuild/linux-arm64": "0.21.4", + "@hono/zod-validator": "^0.4.1", "drizzle-orm": "0.31.2", "esbuild": "0.21.4", "fast-jwt": "^2.2.1", - "hono": "^3.9.2", + "hono": "4.6.5", "luxon": "^3.3.0", + "oauth4webapi": "^3.1.2", "octokit": "^4.0.2", "remeda": "^1.17.1", "replicache": "^14.2.2", diff --git a/packages/functions/src/api.ts b/packages/functions/src/api.ts deleted file mode 100644 index 1c5f73c6..00000000 --- a/packages/functions/src/api.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { Actor, withActor, useActor } from "@console/core/actor"; -import { useHeader, Response } from "sst/node/api"; -import { User } from "@console/core/user"; -import { sessions } from "./sessions"; - -export const withApiAuth = (cb: () => Promise) => { - return function () { - const session = sessions.use(); - - const workspaceID = useHeader("x-sst-workspace"); - if (!workspaceID) return withActor(session, cb); - if (session.type !== "account") - throw new Response({ - statusCode: 401, - body: "Unauthorized", - }); - - const result = withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - async () => { - const user = await User.fromEmail(session.properties.email); - if (!user || user.timeDeleted) - throw new Response({ - statusCode: 401, - body: "Unauthorized", - }); - return withActor( - { - type: "user", - properties: { - workspaceID, - userID: user.id, - }, - }, - cb - ); - } - ); - return result; - }; -}; - -export function NotPublic() { - const actor = useActor(); - if (actor.type === "public") - throw new Response({ - statusCode: 401, - body: "Unauthorized", - }); -} diff --git a/packages/functions/src/api/account.ts 
b/packages/functions/src/api/account.ts new file mode 100644 index 00000000..e0bf233b --- /dev/null +++ b/packages/functions/src/api/account.ts @@ -0,0 +1,50 @@ +import { Hono } from "hono"; +import { notPublic } from "./auth"; +import { assertActor, withActor } from "@console/core/actor"; +import { Account } from "@console/core/account"; +import { zValidator } from "@hono/zod-validator"; +import { Workspace } from "@console/core/workspace"; +import { User } from "@console/core/user"; +import { VisibleError } from "@console/core/util/error"; + +export const AccountRoute = new Hono() + .use(notPublic) + .get("/", async (c) => { + const actor = assertActor("account"); + return c.json({ + id: actor.properties.accountID, + email: actor.properties.email, + workspaces: await Account.workspaces(), + }); + }) + .post( + "/workspace", + zValidator("json", Workspace.create.schema), + async (c) => { + const actor = assertActor("account"); + const body = c.req.valid("json"); + try { + const workspaceID = await Workspace.create(body); + const workspace = await Workspace.fromID(workspaceID); + await withActor( + { + type: "system", + properties: { + workspaceID, + }, + }, + () => + User.create({ + email: actor.properties.email, + first: true, + }), + ); + return c.json(workspace); + } catch { + throw new VisibleError( + "workspace.slug", + "Workspace slug already exists", + ); + } + }, + ); diff --git a/packages/functions/src/api/api.ts b/packages/functions/src/api/api.ts new file mode 100644 index 00000000..5d57de30 --- /dev/null +++ b/packages/functions/src/api/api.ts @@ -0,0 +1,76 @@ +import { VisibleError } from "@console/core/util/error"; +import { Hono } from "hono"; +import { ZodError } from "zod"; +import { ReplicacheRoute } from "./replicache"; +import { auth } from "./auth"; +import { WebhookRoute } from "./webhook"; +import { GithubRoute } from "./github"; +import { BillingRoute } from "./billing"; +import { handle } from "hono/aws-lambda"; +import { AccountRoute } 
from "./account"; +import { logger } from "hono/logger"; +import { compress } from "hono/compress"; +import { DebugRoute } from "./debug"; +import { LogRoute } from "./log"; +import { LambdaRoute } from "./lambda"; +import { SlackRoute } from "./slack"; +import { HTTPException } from "hono/http-exception"; + +export const app = new Hono() + .use(logger()) + .use(compress()) + .use(async (c, next) => { + c.header("Cache-Control", "no-store"); + return next(); + }) + .use(auth) + .onError((error, c) => { + if (error instanceof VisibleError) { + return c.json( + { + code: error.code, + message: error.message, + }, + 400, + ); + } + if (error instanceof HTTPException) { + return c.json( + { + message: error.message, + }, + error.status, + ); + } + console.error(error); + if (error instanceof ZodError) { + const e = error.errors[0]; + if (e) { + return c.json( + { + code: e?.code, + message: e?.message, + }, + 400, + ); + } + } + return c.json( + { + code: "internal", + message: "Internal server error", + }, + 500, + ); + }) + .route("/replicache", ReplicacheRoute) + .route("/webhook", WebhookRoute) + .route("/github", GithubRoute) + .route("/billing", BillingRoute) + .route("/account", AccountRoute) + .route("/debug", DebugRoute) + .route("/lambda", LambdaRoute) + .route("/slack", SlackRoute) + .route("/log", LogRoute); + +export const handler = handle(app); diff --git a/packages/functions/src/api/auth.ts b/packages/functions/src/api/auth.ts new file mode 100644 index 00000000..9f928950 --- /dev/null +++ b/packages/functions/src/api/auth.ts @@ -0,0 +1,66 @@ +import { useActor, withActor } from "@console/core/actor"; +import { User } from "@console/core/user"; +import { VisibleError } from "@console/core/util/error"; +import { MiddlewareHandler } from "hono"; +import { HTTPException } from "hono/http-exception"; +import { sessions } from "src/sessions"; + +export const notPublic: MiddlewareHandler = async (c, next) => { + const actor = useActor(); + if (actor.type === 
"public") + throw new HTTPException(401, { message: "Unauthorized" }); + return next(); +}; + +export const auth: MiddlewareHandler = async (c, next) => { + const authHeader = + c.req.query("authorization") ?? c.req.header("authorization"); + if (!authHeader) return next(); + const match = authHeader.match(/^Bearer (.+)$/); + if (!match) { + throw new VisibleError( + "auth.token", + "Bearer token not found or improperly formatted", + ); + } + const bearerToken = match[1]; + let result = await sessions.verify(bearerToken!).catch(() => undefined); + if (!result) { + throw new HTTPException(401, { + message: "Unauthorized", + }); + } + + if (result.type === "public") { + return withActor({ type: "public", properties: {} }, next); + } + + if (result.type === "account") { + const workspaceID = + c.req.header("x-sst-workspace") || c.req.query("workspaceID"); + if (!workspaceID) return withActor(result, next); + const email = result.properties.email; + return withActor( + { + type: "system", + properties: { + workspaceID, + }, + }, + async () => { + const user = await User.fromEmail(email); + if (!user || user.timeDeleted) { + c.status(401); + return c.text("Unauthorized"); + } + return withActor( + { + type: "user", + properties: { userID: user.id, workspaceID: user.workspaceID }, + }, + next, + ); + }, + ); + } +}; diff --git a/packages/functions/src/billing/create-checkout-session.ts b/packages/functions/src/api/billing.ts similarity index 61% rename from packages/functions/src/billing/create-checkout-session.ts rename to packages/functions/src/api/billing.ts index 74da7214..8e1b8d0f 100644 --- a/packages/functions/src/billing/create-checkout-session.ts +++ b/packages/functions/src/api/billing.ts @@ -1,13 +1,13 @@ -import { DateTime } from "luxon"; -import { ApiHandler, useJsonBody } from "sst/node/api"; -import { withApiAuth } from "src/api"; -import { Config } from "sst/node/config"; +import { Hono } from "hono"; +import { notPublic } from "./auth"; import { Billing 
} from "@console/core/billing"; import { stripe } from "@console/core/stripe"; +import { DateTime } from "luxon"; -export const handler = ApiHandler( - withApiAuth(async () => { - const body = useJsonBody(); +export const BillingRoute = new Hono() + .use(notPublic) + .post("/checkout", async (c) => { + const body = await c.req.json(); const item = await Billing.Stripe.get(); if (!item?.customerID) { @@ -18,7 +18,8 @@ export const handler = ApiHandler( mode: "subscription", line_items: [ { - price: Config.STRIPE_PRICE_ID, + // TODO: add stripe price id + price: "", }, ], customer: item.customerID, @@ -30,9 +31,27 @@ export const handler = ApiHandler( }, }); - return { statusCode: 200, body: JSON.stringify({ url: session.url }) }; + return c.json({ + url: session.cancel_url, + }); }) -); + .post("/portal", async (c) => { + const body = await c.req.json(); + + const item = await Billing.Stripe.get(); + if (!item?.customerID) { + throw new Error("No stripe customer ID"); + } + + const session = await stripe.billingPortal.sessions.create({ + customer: item.customerID, + return_url: body.return_url, + }); + + return c.json({ + url: session.url, + }); + }); function getAnchorDate() { const now = DateTime.now(); diff --git a/packages/functions/src/api/debug.ts b/packages/functions/src/api/debug.ts new file mode 100644 index 00000000..a3edad6f --- /dev/null +++ b/packages/functions/src/api/debug.ts @@ -0,0 +1,4 @@ +import { Hono } from "hono"; +import { notPublic } from "./auth"; + +export const DebugRoute = new Hono().use(notPublic).get("/", async (c) => {}); diff --git a/packages/functions/src/github/webhook.ts b/packages/functions/src/api/github.ts similarity index 61% rename from packages/functions/src/github/webhook.ts rename to packages/functions/src/api/github.ts index e318be94..4caf91be 100644 --- a/packages/functions/src/github/webhook.ts +++ b/packages/functions/src/api/github.ts @@ -1,17 +1,80 @@ -import path from "path"; +import { Hono } from "hono"; +import { 
App } from "octokit"; +import { HTTPException } from "hono/http-exception"; +import { withActor } from "@console/core/actor"; import { Github } from "@console/core/git/github"; +import { Resource } from "sst"; import { Run } from "@console/core/run"; -import { App, Octokit } from "octokit"; -import { ApiHandler, useBody, useHeader } from "sst/node/api"; -import { Config } from "sst/node/config"; -import { withActor } from "@console/core/actor"; -import { Trigger } from "@console/core/run/run.sql"; + +export const GithubRoute = new Hono(); + +GithubRoute.get("/installed", async (c) => { + const workspaceID = c.req.query("state"); + const installationID = parseInt(c.req.query("installation_id") ?? ""); + + if (!installationID) + throw new HTTPException(401, { message: "Unauthorized" }); + + // User has authorized the app + if (workspaceID) { + await withActor( + { + type: "system", + properties: { + workspaceID, + }, + }, + async () => { + await Github.connect(installationID); + await Github.syncRepos(installationID); + } + ); + } + + // No workspaceID when the installation is updated from GitHub console + if (!workspaceID) { + await withActor({ type: "public", properties: {} }, async () => { + await Github.syncRepos(installationID); + }); + } + + return c.html(` + + `); +}); + +GithubRoute.get("/connect", async (c) => { + const workspaceID = c.req.query("workspaceID"); + const appName = + Resource.App.stage === "production" + ? 
"sst-console" + : `sst-console-${Resource.App.stage}`; + return c.redirect( + `https://github.com/apps/${appName}/installations/new?state=${workspaceID}` + ); +}); + +GithubRoute.post("/webhook", async (c) => { + const ret = await app.webhooks.verifyAndReceive({ + id: c.req.header("x-github-delivery")!, + name: c.req.header("x-github-event") as any, + signature: c.req.header("x-hub-signature-256")!, + payload: await c.req.text(), + }); + + return c.text("ok"); +}); const app = new App({ - appId: Config.GITHUB_APP_ID, - privateKey: Config.GITHUB_PRIVATE_KEY, + appId: Resource.GithubAppID.value, + privateKey: Resource.GithubPrivateKey.value, webhooks: { - secret: Config.GITHUB_WEBHOOK_SECRET, + secret: Resource.GithubWebhookSecret.value, }, }); app.webhooks.on("installation.deleted", async (event) => { @@ -103,18 +166,3 @@ app.webhooks.on("push", async (event) => { }, }); }); - -export const handler = ApiHandler(async (event) => { - const ret = await app.webhooks.verifyAndReceive({ - id: useHeader("x-github-delivery")!, - name: useHeader("x-github-event") as any, - signature: useHeader("x-hub-signature-256")!, - payload: useBody()!, - }); - - //console.log(useHeader("x-github-event"), event); - return { - statusCode: 200, - body: "ok", - }; -}); diff --git a/packages/functions/src/api/lambda.ts b/packages/functions/src/api/lambda.ts new file mode 100644 index 00000000..24f62dec --- /dev/null +++ b/packages/functions/src/api/lambda.ts @@ -0,0 +1,13 @@ +import { Hono } from "hono"; +import { notPublic } from "./auth"; +import { zValidator } from "@hono/zod-validator"; +import { Lambda } from "@console/core/lambda"; + +export const LambdaRoute = new Hono() + .use(notPublic) + .post("/invoke", zValidator("json", Lambda.invoke.schema), async (c) => { + const requestID = await Lambda.invoke(c.req.valid("json")); + return c.json({ + requestID, + }); + }); diff --git a/packages/functions/src/api/log.ts b/packages/functions/src/api/log.ts new file mode 
100644 index 00000000..de68de52 --- /dev/null +++ b/packages/functions/src/api/log.ts @@ -0,0 +1,350 @@ +import { Hono } from "hono"; +import { HTTPException } from "hono/http-exception"; +import { notPublic } from "./auth"; +import { Stage } from "@console/core/app"; +import { Invocation, Log, LogEntry } from "@console/core/log"; +import { Storage } from "@console/core/storage"; +import { Issue } from "@console/core/issue"; +import { Replicache } from "@console/core/replicache"; +import { z } from "zod"; +import { + CloudWatchLogsClient, + DescribeLogStreamsCommand, + FilterLogEventsCommand, + GetQueryResultsCommand, + StartQueryCommand, +} from "@aws-sdk/client-cloudwatch-logs"; +import { zValidator } from "@hono/zod-validator"; +import { Realtime } from "@console/core/realtime"; +import { DateTime } from "luxon"; + +export const LogRoute = new Hono() + .use(notPublic) + .get("/", async (c) => { + const pointer = JSON.parse(c.req.query("pointer") || "{}"); + const stageID = c.req.query("stageID")!; + const groupID = c.req.query("groupID")!; + + const config = await Stage.assumeRole(stageID); + if (!config) { + throw new HTTPException(400); + } + + if (groupID.length !== 64) { + const result = await Log.expand({ + group: groupID, + logGroup: pointer.logGroup, + logStream: pointer.logStream, + timestamp: pointer.timestamp, + config, + }); + return c.json(result); + } + + await Issue.expand({ + group: groupID, + stageID, + }); + await Replicache.poke(); + return c.json({}); + }) + .get( + "/aws/tail", + zValidator( + "query", + z.object({ + stageID: z.string(), + logGroup: z.string(), + hint: z.enum(["normal", "lambda"]), + }), + ), + async (c) => { + const query = c.req.valid("query"); + let start = Date.now() - 2 * 60 * 1000; + console.log("tailing from", start); + const config = await Stage.assumeRole(query.stageID); + if (!config) throw new HTTPException(500); + const client = new CloudWatchLogsClient(config); + const sourcemapKey = + 
`arn:aws:lambda:${config.region}:${config.awsAccountID}:function:` + + query.logGroup.split("/").slice(3, 5).join("/"); + + async function* fetchStreams(logGroup: string) { + let nextToken: string | undefined; + console.log("fetching streams for", logGroup); + + while (true) { + try { + const response = await client.send( + new DescribeLogStreamsCommand({ + logGroupIdentifier: logGroup, + nextToken: nextToken, + orderBy: "LastEventTime", + descending: true, + }), + ); + + for (const logStream of response.logStreams || []) { + yield logStream; + } + + nextToken = response.nextToken; + if (!nextToken) { + break; + } + } catch (e) { + break; + } + } + } + + async function* fetchEvents( + logGroup: string, + startTime: number, + streams: string[], + ) { + let nextToken: string | undefined; + console.log("fetching logs for", streams.length, "streams"); + + while (true) { + const response = await client.send( + new FilterLogEventsCommand({ + logGroupIdentifier: logGroup, + logStreamNames: streams, + nextToken, + startTime, + }), + ); + + for (const event of response.events || []) { + yield event; + } + + nextToken = response.nextToken; + if (!nextToken) { + break; + } + } + } + + const streams: string[] = []; + + for await (const stream of fetchStreams(query.logGroup)) { + streams.push(stream.logStreamName || ""); + if (streams.length === 100) break; + } + if (!streams.length) return; + if (!start) start = Date.now() - 2 * 60 * 1000; + + console.log("fetching since", new Date(start).toLocaleString()); + if (query.hint === "normal") { + const events = []; + for await (const event of fetchEvents(query.logGroup, start, streams)) { + events.push({ + id: event.eventId!, + timestamp: event.timestamp!, + message: event.message!, + }); + } + return c.json(events); + } + + if (query.hint === "lambda") { + const processor = Log.createProcessor({ + sourcemapKey, + group: query.logGroup + "-tail", + config, + }); + + for await (const event of fetchEvents(query.logGroup, start, 
streams)) { + await processor.process({ + timestamp: event.timestamp!, + line: event.message!, + streamName: event.logStreamName!, + id: event.eventId!, + }); + } + const data = processor.flush(); + return c.json(data); + } + }, + ) + .get( + "/aws/past", + zValidator( + "query", + z.object({ + logGroup: z.string(), + stageID: z.string(), + end: z.string().optional(), + hint: z.enum(["normal", "lambda"]), + }), + ), + async (c) => { + const query = c.req.valid("query"); + const config = await Stage.assumeRole(query.stageID); + if (!config) + throw new HTTPException(400, { + message: "Failed to assume role for stage: " + query.stageID, + }); + const client = new CloudWatchLogsClient(config); + const entries: LogEntry[] = []; + let end = query.end ? DateTime.fromISO(query.end) : DateTime.now(); + let start = query.end + ? end.minus({ hours: 1 }) + : await (async () => { + const response = await client + .send( + new DescribeLogStreamsCommand({ + logGroupIdentifier: query.logGroup, + orderBy: "LastEventTime", + descending: true, + limit: 1, + }), + ) + .catch((ex) => { + if (ex.name === "ResourceNotFoundException") return; + throw ex; + }); + if (!response) return; + return DateTime.fromMillis( + response.logStreams?.[0]?.lastEventTimestamp! 
- 30 * 60 * 1000, + ).startOf("hour"); + })(); + const result = await (async () => { + let iteration = 0; + + if (!start) return; + console.log("start", start.toLocaleString(DateTime.DATETIME_SHORT)); + + const processor = Log.createProcessor({ + sourcemapKey: + `arn:aws:lambda:${config.region}:${config.awsAccountID}:function:` + + query.logGroup.split("/").slice(3, 5).join("/"), + group: query.logGroup, + config, + }); + + while (true) { + console.log( + "scanning from", + start.toLocaleString(DateTime.DATETIME_SHORT), + "to", + end.toLocaleString(DateTime.DATETIME_SHORT), + ); + const result = await client + .send( + new StartQueryCommand({ + logGroupIdentifiers: [query.logGroup], + queryString: `fields @timestamp, @message, @logStream | sort @timestamp desc | limit 10000`, + startTime: start.toMillis() / 1000, + endTime: end.toMillis() / 1000, + }), + ) + .catch((ex) => {}); + if (!result) return true; + console.log("created query", result.queryId); + + while (true) { + const response = await client.send( + new GetQueryResultsCommand({ + queryId: result.queryId, + }), + ); + const results = response.results || []; + console.log("log insights results", results.length); + results.sort((a, b) => a[0]!.value!.localeCompare(b[0]!.value!)); + + if (response.status === "Complete") { + if (query.hint === "lambda") { + let index = 0; + async function flush() { + const data = processor.flush(-1); + if (data.length) { + entries.push(...data); + } + } + + for (const result of results) { + await processor.process({ + id: index.toString(), + timestamp: new Date(result[0]?.value! + " Z").getTime(), + streamName: result[2]?.value!, + line: result[1]?.value!, + }); + index++; + } + await flush(); + } + + if (query.hint === "normal") { + for (const result of results) { + entries.push({ + id: result[3]!.value!, + message: result[1]?.value!, + timestamp: new Date(result[0]?.value! 
+ " Z").getTime(), + }); + } + } + + if (entries.length >= 50) { + return false; + } + + break; + } + + await new Promise((resolve) => setTimeout(resolve, 200)); + } + + iteration++; + end = start; + start = start.minus({ millisecond: delay(iteration) }); + } + })(); + + return c.json({ + completed: result, + start: start!.toISO()!, + invocations: entries, + }); + }, + ) + .get( + "/aws/scan", + zValidator( + "query", + z.object({ + stageID: z.string(), + requestID: z.string().optional(), + timestamp: z.number({ coerce: true }).optional(), + logGroup: z.string(), + logStream: z.string(), + }), + ), + async (c) => { + const body = c.req.valid("query"); + let start = Date.now() - 2 * 60 * 1000; + console.log("tailing from", start); + const config = await Stage.assumeRole(body.stageID); + if (!config) + return { + statusCode: 500, + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ error: "Failed to assume role" }), + }; + const logs = await Log.scan({ + ...body, + timestamp: body.timestamp || undefined, + config, + }); + return c.json(logs); + }, + ); + +function delay(iteration: number) { + const hours = Math.pow(2, iteration) - 1; + return hours * 60 * 60 * 1000; +} diff --git a/packages/functions/src/api/replicache.ts b/packages/functions/src/api/replicache.ts new file mode 100644 index 00000000..31741d8c --- /dev/null +++ b/packages/functions/src/api/replicache.ts @@ -0,0 +1,633 @@ +import { DateTime } from "luxon"; +import { useActor, useWorkspace } from "@console/core/actor"; +import { user } from "@console/core/user/user.sql"; +import { createTransaction } from "@console/core/util/transaction"; +import { + eq, + and, + gt, + gte, + inArray, + isNull, + SQLWrapper, + sql, + SQL, +} from "drizzle-orm"; +import { workspace } from "@console/core/workspace/workspace.sql"; +import { stripe, usage } from "@console/core/billing/billing.sql"; +import { app, appRepoTable, resource, stage } from "@console/core/app/app.sql"; +import { 
awsAccount } from "@console/core/aws/aws.sql"; +import { + replicache_client, + replicache_client_group, +} from "@console/core/replicache/replicache.sql"; +import { lambdaPayload } from "@console/core/lambda/lambda.sql"; +import { chunk, isDeepEqual, mapValues } from "remeda"; +import { log_poller, log_search } from "@console/core/log/log.sql"; +import { + PatchOperation, + PullRequest, + PullResponseV1, + PushRequest, +} from "replicache"; +import { warning } from "@console/core/warning/warning.sql"; +import { issue, issueCount } from "@console/core/issue/issue.sql"; +import { MySqlColumn } from "drizzle-orm/mysql-core"; +import { + db, + getTableColumns, + isNotNull, + notInArray, +} from "@console/core/drizzle"; +import { githubOrgTable, githubRepoTable } from "@console/core/git/git.sql"; +import { slackTeam } from "@console/core/slack/slack.sql"; +import { + stateEventTable, + stateResourceTable, + stateUpdateTable, +} from "@console/core/state/state.sql"; +import { State } from "@console/core/state"; +import { runConfigTable, runTable } from "@console/core/run/run.sql"; +import { Run } from "@console/core/run"; +import { Replicache } from "@console/core/replicache"; +import { AppRepo } from "@console/core/app/repo"; +import { Github } from "@console/core/git/github"; +import { alert } from "@console/core/alert/alert.sql"; +import { Alert } from "@console/core/alert"; +import { S3Client } from "@aws-sdk/client-s3"; +import { Hono } from "hono"; +import { notPublic } from "./auth"; +import { server } from "src/replicache/server"; +import { VisibleError } from "@console/core/util/error"; + +export const ReplicacheRoute = new Hono().use(notPublic); + +export const TABLES = { + stateUpdate: stateUpdateTable, + stateResource: stateResourceTable, + stateEvent: stateEventTable, + workspace, + stripe, + user, + awsAccount, + app, + appRepo: appRepoTable, + stage, + resource, + log_poller, + log_search, + lambdaPayload, + warning, + issue, + issueCount, + alert, + 
githubOrg: githubOrgTable, + githubRepo: githubRepoTable, + slackTeam, + usage, + run: runTable, + runConfig: runConfigTable, +}; + +type TableName = keyof typeof TABLES; + +const TABLE_KEY = { + appRepo: [appRepoTable.appID, appRepoTable.id], + runConfig: [runConfigTable.appID, runConfigTable.id], + issue: [issue.stageID, issue.id], + resource: [resource.stageID, resource.id], + issueCount: [issueCount.group, issueCount.id], + warning: [warning.stageID, warning.type, warning.id], + usage: [usage.stageID, usage.id], + stateUpdate: [stateUpdateTable.stageID, stateUpdateTable.id], + stateResource: [stateResourceTable.stageID, stateResourceTable.id], + stateEvent: [ + stateEventTable.stageID, + stateEventTable.updateID, + stateEventTable.id, + ], + run: [runTable.stageID, runTable.id], + stripe: [], +} as { + [key in TableName]?: MySqlColumn[]; +}; + +const TABLE_SELECT = { + stateEvent: (() => { + const { inputs, outputs, ...rest } = getTableColumns(stateEventTable); + return rest; + })(), +} as { + [key in TableName]?: any; +}; + +const TABLE_PROJECTION = { + alert: (input) => Alert.serialize(input), + appRepo: (input) => AppRepo.serializeAppRepo(input), + githubOrg: (input) => Github.serializeOrg(input), + githubRepo: (input) => Github.serializeRepo(input), + stateUpdate: (input) => State.serializeUpdate(input), + stateEvent: (input) => State.serializeEvent(input), + stateResource: (input) => State.serializeResource(input), + runConfig: (input) => { + if (!input.env) return input; + for (const key of Object.keys(input.env)) { + input.env[key] = "__secret"; + } + return input; + }, + run: (input) => Run.serializeRun(input), +} as { + [key in TableName]?: (input: (typeof TABLES)[key]["$inferSelect"]) => any; +}; + +const s3 = new S3Client({}); + +ReplicacheRoute.post("/pull1", async (c) => { + const actor = useActor(); + function log(...args: any[]) { + if (process.env.SST_DEV) return; + console.log(...args); + } + log("actor", actor); + + const req: PullRequest = 
await c.req.json(); + log("request", req); + if (req.pullVersion !== 1) { + return c.redirect("/replicache/pull"); + } + + await db.insert(replicache_client_group).ignore().values({ + id: req.clientGroupID, + cvrVersion: 0, + actor, + clientVersion: 0, + }); + const resp = await createTransaction( + async (tx): Promise => { + const patch: PatchOperation[] = []; + + const group = await tx + .select({ + id: replicache_client_group.id, + cvrVersion: replicache_client_group.cvrVersion, + clientVersion: replicache_client_group.clientVersion, + actor: replicache_client_group.actor, + }) + .from(replicache_client_group) + .for("update") + .where(and(eq(replicache_client_group.id, req.clientGroupID))) + .execute() + .then((rows) => rows.at(0)!); + + if (!isDeepEqual(group.actor, actor)) { + log("compare failed", group.actor, actor); + return; + } + + const oldCvr = await Replicache.CVR.get( + req.clientGroupID, + req.cookie as number, + ); + + const cvr = oldCvr ?? { + data: {}, + clientVersion: 0, + }; + + const toPut: Record = {}; + const nextCvr = { + data: {} as Record, + version: Math.max(req.cookie as number, group.cvrVersion) + 1, + }; + + if (!oldCvr) { + patch.push({ + op: "clear", + }); + patch.push({ + op: "put", + key: "/init", + value: true, + }); + } + + const results: [ + string, + { id: string; version: string; key: string }[], + ][] = []; + + if (actor.type === "user") { + log("syncing user"); + + const deletedStages = await tx + .select({ id: stage.id }) + .from(stage) + .where( + and( + isNotNull(stage.timeDeleted), + eq(stage.workspaceID, useWorkspace()), + ), + ) + .then((rows) => rows.map((row) => row.id)); + + const updates = await tx + .select({ + id: stateUpdateTable.id, + rowNumber: + sql`ROW_NUMBER() OVER (PARTITION BY ${stateUpdateTable.stageID} ORDER BY ${stateUpdateTable.index} DESC)`.as( + "row_number", + ), + }) + .from(stateUpdateTable) + .where( + and( + eq(stateUpdateTable.workspaceID, useWorkspace()), + deletedStages.length + ? 
notInArray(stateUpdateTable.stageID, deletedStages) + : undefined, + ), + ) + .then((rows) => + rows + .filter((row) => parseInt(row.rowNumber) <= 50) + .map((row) => row.id), + ); + + const runs = await tx + .select({ + id: runTable.id, + rowNumber: + sql`ROW_NUMBER() OVER (PARTITION BY ${runTable.stageID} ORDER BY ${runTable.timeCreated} DESC)`.as( + "row_number", + ), + }) + .from(runTable) + .where( + and( + eq(runTable.workspaceID, useWorkspace()), + deletedStages.length + ? notInArray(runTable.stageID, deletedStages) + : undefined, + ), + ) + .then((rows) => + rows + .filter((row) => parseInt(row.rowNumber) <= 50) + .map((row) => row.id), + ); + const tableFilters = { + log_search: eq(log_search.userID, actor.properties.userID), + usage: gte( + usage.day, + DateTime.now().toUTC().startOf("month").toSQLDate()!, + ), + issueCount: gte( + issueCount.hour, + DateTime.now() + .toUTC() + .startOf("hour") + .minus({ day: 1 }) + .toSQL({ includeOffset: false })!, + ), + issue: isNull(issue.timeDeleted), + ...(updates.length + ? { + stateEvent: inArray(stateEventTable.updateID, updates), + stateUpdate: inArray(stateUpdateTable.id, updates), + } + : {}), + stateResource: deletedStages.length + ? notInArray(stateResourceTable.stageID, deletedStages) + : undefined, + run: runs.length ? inArray(runTable.id, runs) : undefined, + } satisfies { + [key in keyof typeof TABLES]?: SQLWrapper; + }; + + const workspaceID = useWorkspace(); + + for (const [name, table] of Object.entries(TABLES)) { + const key = TABLE_KEY[name as TableName] ?? [table.id]; + const query = tx + .select({ + name: sql`${name}`, + id: table.id, + version: table.timeUpdated, + key: sql.join([ + sql`concat_ws(`, + sql.join([sql`'/'`, sql`''`, sql`${name}`, ...key], sql`, `), + sql.raw(`)`), + ]) as SQL, + }) + .from(table) + .where( + and( + eq( + "workspaceID" in table ? table.workspaceID : table.id, + workspaceID, + ), + ...(name === "stage" ? 
[] : [isNull(table.timeDeleted)]), + ...(name in tableFilters + ? [tableFilters[name as keyof typeof tableFilters]] + : []), + ), + ); + log("getting updated from", name); + const rows = await query.execute(); + results.push([name, rows as any]); + } + } + + if (actor.type === "account") { + log("syncing account"); + + const [users] = await Promise.all([ + await tx + .select({ + id: user.id, + key: sql`concat('/user/', ${user.id})`, + version: user.timeUpdated, + }) + .from(user) + .innerJoin(workspace, eq(user.workspaceID, workspace.id)) + .where( + and( + eq(user.email, actor.properties.email), + isNull(user.timeDeleted), + isNull(workspace.timeDeleted), + ), + ) + .execute(), + ]); + results.push(["user", users]); + + const workspaces = await tx + .select({ + id: workspace.id, + version: workspace.timeUpdated, + key: sql`concat('/workspace/', ${workspace.id})`, + }) + .from(workspace) + .leftJoin(user, eq(user.workspaceID, workspace.id)) + .where( + and( + eq(user.email, actor.properties.email), + isNull(user.timeDeleted), + isNull(workspace.timeDeleted), + ), + ) + .execute(); + results.push(["workspace", workspaces]); + } + + for (const [name, rows] of results) { + const arr = []; + for (const row of rows) { + const version = new Date(row.version).getTime(); + if (cvr.data[row.key] !== version) { + arr.push(row); + } + delete cvr.data[row.key]; + nextCvr.data[row.key] = version; + } + toPut[name] = arr; + } + + log( + "toPut", + mapValues(toPut, (value) => value.length), + ); + + log("toDel", cvr.data); + + // new data + for (const [name, items] of Object.entries(toPut)) { + log(name); + const ids = items.map((item) => item.id); + const keys = Object.fromEntries( + items.map((item) => [item.id, item.key]), + ); + + if (!ids.length) continue; + const table = TABLES[name as keyof typeof TABLES]; + + for (const group of chunk(ids, 200)) { + const now = Date.now(); + log(name, "fetching", group.length); + const rows = await tx + .select( + TABLE_SELECT[name as 
keyof typeof TABLE_SELECT] || + getTableColumns(table), + ) + .from(table) + .where( + and( + "workspaceID" in table && actor.type === "user" + ? eq(table.workspaceID, useWorkspace()) + : undefined, + inArray(table.id, group), + ), + ) + .execute(); + log(name, "got", rows.length, "in", Date.now() - now, "ms"); + const projection = + TABLE_PROJECTION[name as keyof typeof TABLE_PROJECTION]; + for (const row of rows) { + const key = keys[row.id]!; + patch.push({ + op: "put", + key, + value: projection ? projection(row as any) : row, + }); + } + } + } + + // remove deleted data + for (const [key] of Object.entries(cvr.data)) { + patch.push({ + op: "del", + key, + }); + } + + const clients = await tx + .select({ + id: replicache_client.id, + mutationID: replicache_client.mutationID, + clientVersion: replicache_client.clientVersion, + }) + .from(replicache_client) + .where( + and( + eq(replicache_client.clientGroupID, req.clientGroupID), + gt(replicache_client.clientVersion, cvr.clientVersion), + ), + ) + .execute(); + + const lastMutationIDChanges = Object.fromEntries( + clients.map((c) => [c.id, c.mutationID] as const), + ); + if (patch.length > 0 || Object.keys(lastMutationIDChanges).length > 0) { + log("inserting", req.clientGroupID); + await tx + .update(replicache_client_group) + .set({ + cvrVersion: nextCvr.version, + }) + .where(eq(replicache_client_group.id, req.clientGroupID)) + .execute(); + + await Replicache.CVR.put(req.clientGroupID, nextCvr.version, { + data: nextCvr.data, + clientVersion: group.clientVersion, + }); + + return { + patch, + cookie: nextCvr.version, + lastMutationIDChanges, + }; + } + + return { + patch: [], + cookie: req.cookie, + lastMutationIDChanges, + }; + }, + { + isolationLevel: "repeatable read", + }, + ); + + return c.json(resp); +}); + +ReplicacheRoute.post("/push1", async (c) => { + const actor = useActor(); + + const body = await c.req.json(); + if (body.pushVersion !== 1) return c.redirect("/replicache/push"); + + for (const 
mutation of body.mutations) { + await createTransaction( + async (tx) => { + const group = await tx + .select({ + id: replicache_client_group.id, + cvrVersion: replicache_client_group.cvrVersion, + clientVersion: replicache_client_group.clientVersion, + actor: replicache_client_group.actor, + }) + .from(replicache_client_group) + .for("update") + .where(and(eq(replicache_client_group.id, body.clientGroupID))) + .execute() + .then( + (rows) => + rows.at(0) ?? { + id: body.clientGroupID, + actor: actor, + cvrVersion: 0, + clientVersion: 0, + }, + ); + + // if (!equals(group.actor, actor)) { + // throw new Error( + // `${actor} is not authorized to push to ${body.clientGroupID}}` + // ); + // } + + const client = await tx + .select({ + id: replicache_client.id, + clientGroupID: replicache_client.clientGroupID, + mutationID: replicache_client.mutationID, + clientVersion: replicache_client.clientVersion, + }) + .from(replicache_client) + .for("update") + .where(and(eq(replicache_client.id, mutation.clientID))) + .execute() + .then( + (rows) => + rows.at(0) || { + id: body.clientGroupID, + clientGroupID: body.clientGroupID, + mutationID: 0, + clientVersion: 0, + }, + ); + + const nextClientVersion = group.clientVersion + 1; + const nextMutationID = client.mutationID + 1; + + if (mutation.id < nextMutationID) { + console.log( + `Mutation ${mutation.id} has already been processed - skipping`, + ); + return c.status(200); + } + + if (mutation.id > nextMutationID) { + throw new Error( + `Mutation ${mutation.id} is from the future - aborting`, + ); + } + + const { args, name } = mutation; + console.log("processing", mutation.id, name); + try { + await server.execute(name, args); + } catch (ex) { + if (!(ex instanceof VisibleError)) console.error(ex); + } + console.log("done processing", mutation.id, name); + + await tx + .insert(replicache_client_group) + .values({ + id: body.clientGroupID, + clientVersion: nextClientVersion, + cvrVersion: group.cvrVersion, + actor, + }) + 
.onDuplicateKeyUpdate({ + set: { + cvrVersion: group.cvrVersion, + clientVersion: nextClientVersion, + }, + }) + .execute(); + + await tx + .insert(replicache_client) + .values({ + id: mutation.clientID, + clientGroupID: group.id, + mutationID: nextMutationID, + clientVersion: nextClientVersion, + }) + .onDuplicateKeyUpdate({ + set: { + clientGroupID: group.id, + mutationID: nextMutationID, + clientVersion: nextClientVersion, + }, + }) + .execute(); + }, + { + isolationLevel: "repeatable read", + }, + ); + } + + if (actor.type === "user") await Replicache.poke(); + + return c.text("ok"); +}); diff --git a/packages/functions/src/api/slack.ts b/packages/functions/src/api/slack.ts new file mode 100644 index 00000000..7b380e7b --- /dev/null +++ b/packages/functions/src/api/slack.ts @@ -0,0 +1,91 @@ +import { Hono } from "hono"; +import { Resource } from "sst"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { getCookie, setCookie } from "hono/cookie"; +import { useWorkspace, withActor } from "@console/core/actor"; +import { createId } from "@console/core/util/sql"; +import { HTTPException } from "hono/http-exception"; +import { Slack } from "@console/core/slack"; + +export const SlackRoute = new Hono() + .get("/authorize", async (c) => { + const origin = c.req.header("x-forwarded-host"); + const state = createId(); + const authorize = new URL("https://slack.com/oauth/v2/authorize"); + authorize.search = new URLSearchParams({ + client_id: Resource.SlackClientID.value, + scope: "chat:write team:read chat:write.public", + redirect_uri: `https://${origin}/slack/callback`, + state, + }).toString(); + setCookie(c, "state", state, { + httpOnly: true, + sameSite: "lax", + secure: true, + path: "/", + maxAge: 600, + }); + setCookie(c, "workspaceID", useWorkspace(), { + httpOnly: true, + sameSite: "lax", + secure: true, + path: "/", + }); + return c.redirect(authorize.toString()); + }) + .get( + "/callback", + 
zValidator("query", z.object({ code: z.string(), state: z.string() })), + async (c) => { + const origin = c.req.header("x-forwarded-host"); + const workspaceID = getCookie(c, "workspaceID")!; + const state = getCookie(c, "state"); + const query = c.req.valid("query"); + if (!state || state !== query.state) { + throw new HTTPException(400, { + message: "invalid state parameter", + }); + } + const response = await fetch("https://slack.com/api/oauth.v2.access", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + Accept: "application/json", + }, + body: new URLSearchParams({ + client_id: Resource.SlackClientID.value, + client_secret: Resource.SlackClientSecret.value, + code: query.code, + redirect_uri: `https://${origin}/slack/callback`, + }), + }); + + if (!response.ok) + throw new HTTPException(401, { + message: "Unauthorized", + }); + const data = (await response.json()) as { access_token: string }; + await withActor( + { + type: "system", + properties: { + workspaceID, + }, + }, + async () => { + await Slack.connect(data.access_token); + }, + ); + return c.html(` + + + + `); + }, + ); diff --git a/packages/functions/src/billing/webhook.ts b/packages/functions/src/api/webhook.ts similarity index 88% rename from packages/functions/src/billing/webhook.ts rename to packages/functions/src/api/webhook.ts index 585a5963..c9f0d8f3 100644 --- a/packages/functions/src/billing/webhook.ts +++ b/packages/functions/src/api/webhook.ts @@ -1,16 +1,17 @@ -import { Workspace } from "@console/core/workspace"; -import { stripe } from "@console/core/stripe"; -import { ApiHandler } from "sst/node/api"; -import { Config } from "sst/node/config"; -import { Billing } from "@console/core/billing"; import { withActor } from "@console/core/actor"; +import { Billing } from "@console/core/billing"; +import { stripe } from "@console/core/stripe"; +import { Hono } from "hono"; -export const handler = ApiHandler(async (event) => { +export 
const WebhookRoute = new Hono(); + +WebhookRoute.post("/stripe", async (c) => { // validate signature const body = stripe.webhooks.constructEvent( - event.body!, - event.headers["stripe-signature"]!, - Config.STRIPE_WEBHOOK_SIGNING_SECRET + await c.req.text(), + c.req.header("stripe-signature")!, + // TODO: add signing secret + "", ); console.log(body.type, body); @@ -39,7 +40,7 @@ export const handler = ApiHandler(async (event) => { subscriptionItemID: items.data[0].id, }); await Billing.updateGatingStatus(); - } + }, ); } else if (body.type === "customer.subscription.updated") { const { id: subscriptionID, customer, status } = body.data.object; @@ -73,7 +74,7 @@ export const handler = ApiHandler(async (event) => { }); await Billing.updateGatingStatus(); } - } + }, ); } else if (body.type === "customer.subscription.deleted") { const { id: subscriptionID } = body.data.object; @@ -92,9 +93,9 @@ export const handler = ApiHandler(async (event) => { invoice: id, customer, created: new Date(created * 1000), - } + }, ); } - return { statusCode: 200 }; + return c.status(200); }); diff --git a/packages/functions/src/auth-iot.ts b/packages/functions/src/auth-iot.ts deleted file mode 100644 index 4a254952..00000000 --- a/packages/functions/src/auth-iot.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { assertActor, withActor } from "@console/core/actor"; -import { Config } from "sst/node/config"; -import { Session } from "sst/node/future/auth"; -import { db } from "@console/core/drizzle"; -import { user } from "@console/core/user/user.sql"; -import { eq } from "drizzle-orm"; -import { DateTime } from "luxon"; - -export async function handler(evt: any) { - const tokens = Buffer.from(evt.protocolData.mqtt.password, "base64") - .toString() - .split(";"); - const workspaces = [] as string[]; - for (const token of tokens) { - const session = Session.verify(token); - await withActor(session as any, async () => { - const account = assertActor("account"); - const rows = await db - 
.select({ - workspaceID: user.workspaceID, - }) - .from(user) - .where(eq(user.email, account.properties.email)) - .execute(); - - await db - .update(user) - .set({ - timeSeen: DateTime.now().toSQL({ includeOffset: false }), - }) - .where(eq(user.email, account.properties.email)) - .execute(); - workspaces.push(...rows.map((r) => r.workspaceID)); - }); - } - console.log("workspaces", workspaces); - const policy = { - isAuthenticated: true, //A Boolean that determines whether client can connect. - principalId: Date.now().toString(), //A string that identifies the connection in logs. - disconnectAfterInSeconds: 86400, - refreshAfterInSeconds: 300, - policyDocuments: [ - { - Version: "2012-10-17", - Statement: [ - { - Action: "iot:Connect", - Effect: "Allow", - Resource: "*", - }, - ], - }, - ...workspaces.slice(0, 10).map((workspaceID) => ({ - Version: "2012-10-17", - Statement: [ - { - Action: "iot:Receive", - Effect: "Allow", - Resource: `arn:aws:iot:us-east-1:${process.env.ACCOUNT}:topic/${Config.APP}/${Config.STAGE}/${workspaceID}/*`, - }, - { - Action: "iot:Subscribe", - Effect: "Allow", - Resource: `arn:aws:iot:us-east-1:${process.env.ACCOUNT}:topicfilter/${Config.APP}/${Config.STAGE}/${workspaceID}/*`, - }, - ], - })), - ], - }; - return policy; -} diff --git a/packages/functions/src/auth-websocket.ts b/packages/functions/src/auth-websocket.ts new file mode 100644 index 00000000..f933e869 --- /dev/null +++ b/packages/functions/src/auth-websocket.ts @@ -0,0 +1,38 @@ +import { withActor } from "@console/core/actor"; +import { sessions } from "./sessions"; +import { User } from "@console/core/user"; +import { db, eq, sql } from "@console/core/drizzle"; +import { user } from "@console/core/user/user.sql"; + +export async function handler(event: any) { + const token = event.authorizationToken; + const session = await sessions.verify(token).catch(() => undefined); + if (!session) return { isAuthorized: false }; + if (session.type !== "account") return { 
isAuthorized: false }; + if (event.requestContext.operation === "EVENT_CONNECT") { + await db + .update(user) + .set({ + timeSeen: sql`now()`, + }) + .where(eq(user.email, session.properties.email)) + .execute(); + return { isAuthorized: true }; + } + const workspaceID = event.requestContext.channel.split("/").at(2)!; + return withActor( + { + type: "system", + properties: { + workspaceID, + }, + }, + async () => { + const user = await User.fromEmail(session.properties.email); + if (!user || user.timeDeleted) { + return { isAuthorized: false }; + } + return { isAuthorized: true }; + }, + ); +} diff --git a/packages/functions/src/auth.ts b/packages/functions/src/auth.ts index c6bfba79..568509e9 100644 --- a/packages/functions/src/auth.ts +++ b/packages/functions/src/auth.ts @@ -1,24 +1,18 @@ -import { - Issuer, - OauthAdapter, - AuthHandler, - CodeAdapter, -} from "sst/node/future/auth"; -import { Config } from "sst/node/config"; +import { auth } from "sst/aws/auth"; import { Account } from "@console/core/account"; -import { Slack } from "@console/core/slack"; import { SESv2Client, SendEmailCommand } from "@aws-sdk/client-sesv2"; import Botpoison from "@botpoison/node"; import { sessions } from "./sessions"; import { withActor } from "@console/core/actor"; -import { Response, useCookie, useFormValue, useResponse } from "sst/node/api"; -import { User } from "@console/core/user"; import { z } from "zod"; +import { CodeAdapter, OauthAdapter } from "sst/auth/adapter"; +import { Issuer } from "sst/auth"; +import { Resource } from "sst"; const ses = new SESv2Client({}); -export const handler = AuthHandler({ - sessions, +export const handler = auth.authorizer({ + session: sessions, providers: { slack: OauthAdapter({ issuer: new Issuer({ @@ -27,8 +21,8 @@ export const handler = AuthHandler({ token_endpoint: "https://slack.com/api/oauth.v2.access", }), scope: "chat:write team:read chat:write.public", - clientID: Config.SLACK_CLIENT_ID, - clientSecret: 
Config.SLACK_CLIENT_SECRET, + clientID: Resource.SlackClientID.value, + clientSecret: Resource.SlackClientSecret.value, }), email: CodeAdapter({ async onCodeRequest(code, claims) { @@ -42,32 +36,30 @@ export const handler = AuthHandler({ console.log("code", code); const email = z.string().email().safeParse(claims.email); if (!email.success) { - return { - statusCode: 302, - headers: { - Location: process.env.AUTH_FRONTEND_URL + "/auth/email", - }, - }; + return Response.redirect( + process.env.AUTH_FRONTEND_URL + + "/auth/email?error=invalid_email", + 302, + ); } - if (!process.env.IS_LOCAL) { + if (!process.env.IS_LOCAL && !process.env.SST_DEV) { const botpoison = new Botpoison({ - secretKey: Config.BOTPOISON_SECRET_KEY, + secretKey: Resource.BotpoisonSecretKey.value, }); const { ok } = await botpoison.verify(claims.challenge); if (!ok) - return { - statusCode: 302, - headers: { - Location: process.env.AUTH_FRONTEND_URL + "/auth/email", - }, - }; + return Response.redirect( + process.env.AUTH_FRONTEND_URL + + "/auth/email?error=invalid_challenge", + 302, + ); console.log("challenge verified"); const cmd = new SendEmailCommand({ Destination: { ToAddresses: [email.data], }, - FromEmailAddress: `SST `, + FromEmailAddress: `SST `, Content: { Simple: { Body: { @@ -87,30 +79,25 @@ export const handler = AuthHandler({ await ses.send(cmd); } - return { - statusCode: 302, - headers: { - Location: - process.env.AUTH_FRONTEND_URL + - "/auth/code?" + - new URLSearchParams({ email: claims.email }).toString(), - }, - }; - } + return Response.redirect( + process.env.AUTH_FRONTEND_URL + + "/auth/code?" 
+ + new URLSearchParams({ email: claims.email }).toString(), + 302, + ); + }, ); }, async onCodeInvalid() { - return { - statusCode: 302, - headers: { - Location: - process.env.AUTH_FRONTEND_URL + "/auth/code?error=invalid_code", - }, - }; + return Response.redirect( + process.env.AUTH_FRONTEND_URL + "/auth/code?error=invalid_code", + 302, + ); }, }), }, callbacks: { + /* connect: { async start() { const workspaceID = useFormValue("workspaceID"); @@ -151,7 +138,7 @@ export const handler = AuthHandler({ if (result.provider === "slack") { await Slack.connect(result.tokenset.access_token!); } - } + }, ); return { @@ -171,26 +158,26 @@ export const handler = AuthHandler({ }; }, }, + */ auth: { async allowClient(clientID, redirect) { return true; }, - async success(input, response) { + async success(ctx, response) { let email: string | undefined; - if (input.provider === "email") { + console.log(response); + if (response.provider === "email") { if ( - input.claims.impersonate && - input.claims.email.split("@")[1] !== "sst.dev" + response.claims.impersonate && + response.claims.email?.split("@")[1] !== "sst.dev" ) - return response.http({ - statusCode: 401, - body: "Unauthorized", + return new Response("Unauthorized", { + status: 401, }); - email = input.claims.impersonate || input.claims.email; + email = response.claims.impersonate || response.claims.email; } if (!email) throw new Error("No email found"); - let accountID = await Account.fromEmail(email).then((x) => x?.id); if (!accountID) { console.log("creating account for", email); @@ -198,8 +185,7 @@ export const handler = AuthHandler({ email: email!, }); } - - return response.session({ + return ctx.session({ type: "account", properties: { accountID: accountID!, diff --git a/packages/functions/src/billing/create-customer-portal-session.ts b/packages/functions/src/billing/create-customer-portal-session.ts deleted file mode 100644 index 57f3078d..00000000 --- 
a/packages/functions/src/billing/create-customer-portal-session.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { stripe } from "@console/core/stripe"; -import { ApiHandler, useJsonBody } from "sst/node/api"; -import { withApiAuth } from "src/api"; -import { Billing } from "@console/core/billing"; - -export const handler = ApiHandler( - withApiAuth(async () => { - const body = useJsonBody(); - - const item = await Billing.Stripe.get(); - if (!item?.customerID) { - throw new Error("No stripe customer ID"); - } - - const session = await stripe.billingPortal.sessions.create({ - customer: item.customerID, - return_url: body.return_url, - }); - - return { statusCode: 200, body: JSON.stringify({ url: session.url }) }; - }) -); diff --git a/packages/functions/src/billing/cron.ts b/packages/functions/src/billing/cron.ts deleted file mode 100644 index f08fff3a..00000000 --- a/packages/functions/src/billing/cron.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Stage } from "@console/core/app/stage"; -import { SQSClient, SendMessageBatchCommand } from "@aws-sdk/client-sqs"; -import { Queue } from "sst/node/queue"; -import { chunk } from "remeda"; -import { createId } from "@console/core/util/sql"; - -const sqs = new SQSClient({}); -export async function handler() { - let cursor: Awaited>["cursor"]; - - do { - const ret = await Stage.list({ cursor }); - const stages = ret.items; - cursor = ret.cursor; - - console.log("stages", stages.length); - let index = 0; - for (const stage of chunk(stages, 10)) { - await sqs.send( - new SendMessageBatchCommand({ - QueueUrl: Queue.UsageQueue.queueUrl, - Entries: stage.map((stage) => ({ - Id: createId(), - MessageDeduplicationId: createId(), - MessageBody: JSON.stringify({ - stageID: stage.id, - workspaceID: stage.workspaceID, - }), - MessageGroupId: (index++ % 10).toString(), - })), - }) - ); - } - } while (cursor !== undefined); -} diff --git a/packages/functions/src/connect.ts b/packages/functions/src/connect.ts index a286f4f2..e493aeb5 100644 --- 
a/packages/functions/src/connect.ts +++ b/packages/functions/src/connect.ts @@ -6,6 +6,8 @@ import { import { AWS } from "@console/core/aws"; import { withActor } from "@console/core/actor"; import { Replicache } from "@console/core/replicache"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export async function handler(event: CloudFormationCustomResourceEvent) { console.log(event); @@ -21,7 +23,7 @@ export async function handler(event: CloudFormationCustomResourceEvent) { }, async () => { const credentials = await AWS.assumeRole( - event.ResourceProperties.accountID + event.ResourceProperties.accountID, ); if (credentials) { await AWS.Account.create({ @@ -33,7 +35,7 @@ export async function handler(event: CloudFormationCustomResourceEvent) { } else { status = "FAILED"; } - } + }, ); } catch (ex) { console.error(ex); @@ -51,14 +53,14 @@ export async function handler(event: CloudFormationCustomResourceEvent) { }, async () => { const account = await AWS.Account.fromAccountID( - event.ResourceProperties.accountID + event.ResourceProperties.accountID, ); if (!account) return; - await AWS.Account.Events.Removed.publish({ + await bus.publish(Resource.Bus, AWS.Account.Events.Removed, { awsAccountID: account.id, }); - } + }, ); } diff --git a/packages/functions/src/error.ts b/packages/functions/src/error.ts index 282bc9e3..9c953d46 100644 --- a/packages/functions/src/error.ts +++ b/packages/functions/src/error.ts @@ -1,8 +1,8 @@ -import { ApiHandler } from "sst/node/api"; -export const handler = ApiHandler(async () => { +export const handler = async () => { + console.log("starting", new Date()); console.error(new Error("logged error 3")); return { statusCode: 200, body: "ok", }; -}); +}; diff --git a/packages/functions/src/event.ts b/packages/functions/src/event.ts new file mode 100644 index 00000000..072c5e65 --- /dev/null +++ b/packages/functions/src/event.ts @@ -0,0 +1,135 @@ +import { withActor } from "@console/core/actor"; +import { App, Stage } 
from "@console/core/app"; +import { AWS } from "@console/core/aws"; +import { Issue } from "@console/core/issue"; +import { Run } from "@console/core/run"; +import { State } from "@console/core/state"; +import { bus } from "sst/aws/bus"; + +export const handler = bus.subscriber( + [ + AWS.Account.Events.Created, + AWS.Account.Events.Removed, + App.Stage.Events.Connected, + App.Stage.Events.Updated, + App.Stage.Events.ResourcesUpdated, + State.Event.SummaryCreated, + State.Event.HistoryCreated, + State.Event.HistorySynced, + State.Event.SnapshotCreated, + State.Event.StateUpdated, + State.Event.UpdateCreated, + Stage.Events.ResourcesUpdated, + Issue.Events.RateLimited, + Issue.Events.IssueDetected, + Run.Event.Created, + Run.Event.CreateFailed, + Run.Event.Completed, + ], + async (evt) => + withActor(evt.metadata.actor, async () => { + console.log(evt.type); + console.log(evt); + switch (evt.type) { + case AWS.Account.Events.Created.type: + const account = await AWS.Account.fromID(evt.properties.awsAccountID); + if (!account) { + console.log("account not found"); + return; + } + const credentials = await AWS.assumeRole(account.accountID); + if (!credentials) return; + await AWS.Account.integrate({ + awsAccountID: account.id, + credentials, + }); + break; + + case AWS.Account.Events.Removed.type: + await AWS.Account.disconnect(evt.properties.awsAccountID); + break; + + case State.Event.SummaryCreated.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await State.receiveSummary({ + updateID: evt.properties.updateID, + config, + }); + break; + } + + case State.Event.HistoryCreated.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await State.receiveHistory({ + key: evt.properties.key, + config, + }); + break; + } + + case State.Event.UpdateCreated.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await State.receiveUpdate({ + 
config, + updateID: evt.properties.updateID, + }); + break; + } + + case State.Event.SnapshotCreated.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await State.receiveSnapshot({ + config, + updateID: evt.properties.updateID, + }); + break; + } + + case App.Stage.Events.Updated.type: + case App.Stage.Events.Connected.type: + case State.Event.StateUpdated.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await State.refreshState({ + config, + }); + break; + } + + case Issue.Events.RateLimited.type: { + const config = await Stage.assumeRole(evt.properties.stageID); + if (!config) return; + await Issue.disableLogGroup({ + logGroup: evt.properties.logGroup, + config, + }); + break; + } + + case Issue.Events.IssueDetected.type: { + await Issue.Send.triggerIssue(evt.properties); + break; + } + + case Run.Event.Created.type: { + await Run.orchestrate(evt.properties.stageID); + break; + } + + case Run.Event.CreateFailed.type: { + await Run.alert(evt.properties.runID); + break; + } + + case Run.Event.Completed.type: { + await Run.orchestrate(evt.properties.stageID); + await Run.alert(evt.properties.runID); + break; + } + } + }) +); diff --git a/packages/functions/src/events/app-stage-connected.ts b/packages/functions/src/events/app-stage-connected.ts deleted file mode 100644 index b3544984..00000000 --- a/packages/functions/src/events/app-stage-connected.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app/stage"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Stage.Events.Connected, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await Stage.syncMetadata({ - config, - }); - }), -); diff --git a/packages/functions/src/events/app-stage-updated.ts 
b/packages/functions/src/events/app-stage-updated.ts deleted file mode 100644 index de7f6409..00000000 --- a/packages/functions/src/events/app-stage-updated.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app/stage"; -import { EventHandler } from "sst/node/event-bus"; - -export const handlerInner = EventHandler(Stage.Events.Updated, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await Stage.syncMetadata({ - config, - remove: true, - }); - }), -); - -export const handler = async (evt: any) => { - console.log(evt); - return handlerInner(evt); -}; diff --git a/packages/functions/src/events/aws-account-created.ts b/packages/functions/src/events/aws-account-created.ts deleted file mode 100644 index be08692b..00000000 --- a/packages/functions/src/events/aws-account-created.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { AWS } from "@console/core/aws"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(AWS.Account.Events.Created, (evt) => - withActor(evt.metadata.actor, async () => { - const account = await AWS.Account.fromID(evt.properties.awsAccountID); - if (!account) { - console.log("account not found"); - return; - } - const credentials = await AWS.assumeRole(account.accountID); - if (!credentials) return; - await AWS.Account.integrate({ - awsAccountID: account.id, - credentials, - }); - }) -); diff --git a/packages/functions/src/events/aws-account-removed.ts b/packages/functions/src/events/aws-account-removed.ts deleted file mode 100644 index e83e72e2..00000000 --- a/packages/functions/src/events/aws-account-removed.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { AWS } from "@console/core/aws"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler 
= EventHandler(AWS.Account.Events.Created, (evt) => - withActor(evt.metadata.actor, async () => { - await AWS.Account.disconnect(evt.properties.awsAccountID); - }) -); diff --git a/packages/functions/src/events/fetch-usage.ts b/packages/functions/src/events/fetch-usage.ts deleted file mode 100644 index 3bb051ee..00000000 --- a/packages/functions/src/events/fetch-usage.ts +++ /dev/null @@ -1,241 +0,0 @@ -import { DateTime } from "luxon"; -import { - CloudWatchClient, - GetMetricDataCommand, -} from "@aws-sdk/client-cloudwatch"; -import { withActor, useWorkspace } from "@console/core/actor"; -import { Stage } from "@console/core/app/stage"; -import { Resource } from "@console/core/app/resource"; -import { Billing } from "@console/core/billing"; -import { stripe } from "@console/core/stripe"; -import { Warning } from "@console/core/warning"; -import { uniq } from "remeda"; -import { Handler } from "sst/context"; -import { Workspace } from "@console/core/workspace"; -import { usage } from "@console/core/billing/billing.sql"; -import { and, desc, eq } from "drizzle-orm"; -import { useTransaction } from "@console/core/util/transaction"; - -export const handler = Handler("sqs", async (event) => { - console.log("got", event.Records.length, "records"); - for (const record of event.Records) { - const evt = JSON.parse(record.body); - console.log(record.body); - await withActor( - { - type: "system", - properties: { - workspaceID: evt.workspaceID, - }, - }, - async () => { - const { stageID } = evt; - - // Check if stage is unsupported - const stage = await Stage.fromID(stageID); - if (stage?.unsupported) return; - - await processStage(stageID); - } - ); - } -}); - -async function processStage(stageID: string) { - const workspace = await Workspace.fromID(useWorkspace()); - if (!workspace) return; - - // Start processing from the greater of - // - the last processed day - // - the workspace creation date - const lastUsage = await useTransaction((tx) => - tx - .select() - 
.from(usage) - .where( - and(eq(usage.workspaceID, useWorkspace()), eq(usage.stageID, stageID)) - ) - .orderBy(desc(usage.day)) - .limit(1) - .execute() - .then((x) => x[0]) - ); - - // Get stage credentials - const config = await Stage.assumeRole(stageID); - if (!config) { - console.log("cannot assume role"); - await Warning.create({ - type: "permission_usage", - target: stageID, - stageID, - data: {}, - }); - return; - } - - // Get all function resources - const allResources = await Resource.listFromStageID({ - stageID, - types: ["Function"], - }); - const functions = uniq( - allResources - .flatMap((fn) => - fn.type === "Function" && !fn.enrichment.live ? [fn] : [] - ) - .map((resource) => resource.metadata.arn) - .map((item) => item.split(":").pop()!) - ); - console.log(`> functions ${functions.length}/${allResources.length}`); - if (!functions.length) { - await Warning.remove({ - stageID, - type: "permission_usage", - target: stageID, - }); - return; - } - console.log(functions); - - // Get AWS usage - let startDate = ( - lastUsage - ? 
DateTime.fromSQL(lastUsage.day) - : DateTime.fromSQL(workspace.timeCreated).minus({ days: 1 }) - ) - .toUTC() - .startOf("day"); - let endDate: DateTime; - let hasChanges = false; - - while (true) { - if (startDate.plus({ days: 1 }).endOf("day").diffNow().milliseconds > 0) - break; - startDate = startDate.plus({ days: 1 }); - endDate = startDate.endOf("day"); - - console.log("STAGE", stageID, startDate.toSQLDate(), endDate.toSQLDate()); - - // Get usage - let invocations: number; - try { - invocations = await queryUsageFromAWS(); - await Warning.remove({ - stageID, - type: "permission_usage", - target: stageID, - }); - } catch (e: any) { - if (e.name === "AccessDenied") { - console.error(e); - await Warning.create({ - type: "permission_usage", - target: stageID, - data: {}, - stageID, - }); - await Billing.updateGatingStatus(); - return; - } - throw e; - } - hasChanges = hasChanges || invocations > 0; - - await Billing.createUsage({ - stageID, - day: startDate.toSQLDate()!, - invocations, - }); - - async function queryUsageFromAWS() { - const client = new CloudWatchClient(config!); - - const queryBatch = async (batch: typeof functions) => { - const metrics = await client.send( - new GetMetricDataCommand({ - MetricDataQueries: batch.map((fn, i) => ({ - Id: `m${i}`, - MetricStat: { - Metric: { - Namespace: "AWS/Lambda", - MetricName: "Invocations", - Dimensions: [ - { - Name: "FunctionName", - Value: fn, - }, - ], - }, - Period: 86400, - Stat: "Sum", - }, - })), - StartTime: startDate.toJSDate(), - EndTime: endDate.toJSDate(), - }) - ); - return (metrics.MetricDataResults || [])?.reduce( - (acc, result) => acc + (result.Values?.[0] ?? 
0), - 0 - ); - }; - - // Query in batches - let total = 0; - const chunkSize = 500; - for (let i = 0; i < functions.length; i += chunkSize) { - total += await queryBatch(functions.slice(i, i + chunkSize)); - } - console.log("> invocations", total); - return total; - } - } - - if (hasChanges) await reportUsageToStripe(); - await Billing.updateGatingStatus(); - - ///////////////// - // Functions - ///////////////// - - async function reportUsageToStripe() { - const item = await Billing.Stripe.get(); - if (!item?.subscriptionItemID) return; - - const monthlyInvocations = await Billing.countByStartAndEndDay({ - startDay: startDate.startOf("month").toSQLDate()!, - endDay: startDate.endOf("month").toSQLDate()!, - }); - console.log("> monthly invocations", monthlyInvocations); - - try { - const timestamp = endDate.toUnixInteger(); - await stripe.subscriptionItems.createUsageRecord( - item.subscriptionItemID, - { - quantity: monthlyInvocations, - timestamp, - action: "set", - }, - { - idempotencyKey: `${useWorkspace()}-${stageID}-${timestamp}`, - } - ); - } catch (e: any) { - console.log(e.message); - // TODO: aren't there instanceof checks we can do - if (e.message.startsWith("Keys for idempotent requests")) { - return; - } - if ( - e.message.startsWith( - "Cannot create the usage record with this timestamp" - ) - ) { - return; - } - throw e; - } - } -} diff --git a/packages/functions/src/events/github-installed.ts b/packages/functions/src/events/github-installed.ts deleted file mode 100644 index bf864351..00000000 --- a/packages/functions/src/events/github-installed.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { Github } from "@console/core/git/github"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Github.Events.Installed, (evt) => - Github.syncRepos(evt.properties.installationID) -); diff --git a/packages/functions/src/events/log-poller-status.ts b/packages/functions/src/events/log-poller-status.ts deleted file mode 100644 index 
49ca43e0..00000000 --- a/packages/functions/src/events/log-poller-status.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { LogPoller } from "@console/core/log/poller"; -import { Replicache } from "@console/core/replicache"; - -export const handler = (evt: any) => { - console.log(evt); - const input = JSON.parse(evt.detail.input); - console.log("input", input); - return withActor( - { - type: "system", - properties: { - workspaceID: input.workspaceID, - }, - }, - async () => { - console.log("status", evt.detail.status); - if (evt.detail.status === "RUNNING") { - console.log( - "setting execution arn", - input.pollerID, - evt.detail.executionArn - ); - await LogPoller.setExecution({ - id: input.pollerID, - executionARN: evt.detail.executionArn, - }); - return; - } - - await LogPoller.remove(input.pollerID); - // if (["SUCCEEDED"].includes(evt.detail.status)) return; - // const restarted = await LogPoller.subscribe(input); - // console.log("restarted", restarted); - - await Replicache.poke(); - } - ); -}; diff --git a/packages/functions/src/events/log-scan-created.ts b/packages/functions/src/events/log-scan-created.ts deleted file mode 100644 index 03646ec3..00000000 --- a/packages/functions/src/events/log-scan-created.ts +++ /dev/null @@ -1,172 +0,0 @@ -import { - CloudWatchLogsClient, - DescribeLogStreamsCommand, - GetQueryResultsCommand, - StartQueryCommand, -} from "@aws-sdk/client-cloudwatch-logs"; -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { Log } from "@console/core/log"; -import { Realtime } from "@console/core/realtime"; -import { Replicache } from "@console/core/replicache"; -import { EventHandler } from "sst/node/event-bus"; -import { Storage } from "@console/core/storage"; -import { DateTime } from "luxon"; - -export const handler = EventHandler(Log.Search.Events.Created, (evt) => - withActor(evt.metadata.actor, async () => { - if (evt.attempts > 0) { - 
return; - } - const search = await Log.Search.fromID(evt.properties.id); - if (!search) return; - const profileID = search.profileID || undefined; - const config = await Stage.assumeRole(search.stageID); - if (!config) return; - - const client = new CloudWatchLogsClient(config); - console.log("scanning logs", search); - - const result = await (async () => { - let iteration = 0; - - let end = search.timeEnd - ? DateTime.fromSQL(search.timeEnd, { zone: "utc" }) - : DateTime.now(); - let start = search.timeEnd - ? end.minus({ hours: 1 }) - : await (async () => { - const response = await client - .send( - new DescribeLogStreamsCommand({ - logGroupIdentifier: search.logGroup, - orderBy: "LastEventTime", - descending: true, - limit: 1, - }), - ) - .catch((ex) => { - if (ex.name === "ResourceNotFoundException") return; - throw ex; - }); - if (!response) return; - return DateTime.fromMillis( - response.logStreams?.[0]?.lastEventTimestamp! - 30 * 60 * 1000, - ).startOf("hour"); - })(); - if (!start) return; - console.log("start", start.toLocaleString(DateTime.DATETIME_SHORT)); - - const processor = Log.createProcessor({ - sourcemapKey: - search.workspaceID === "rjt3u9hhb2b0r8b2pxsbqqof" - ? 
undefined - : `arn:aws:lambda:${config.region}:${config.awsAccountID}:function:` + - search.logGroup.split("/").slice(3, 5).join("/"), - group: search.id, - config, - }); - - let flushed = 0; - while (true) { - await Log.Search.setStart({ - id: search.id, - timeStart: start.toSQL({ includeOffset: false }), - }); - await Replicache.poke(profileID); - console.log( - "scanning from", - start.toLocaleString(DateTime.DATETIME_SHORT), - "to", - end.toLocaleString(DateTime.DATETIME_SHORT), - ); - const result = await client - .send( - new StartQueryCommand({ - logGroupIdentifiers: [search.logGroup], - queryString: `fields @timestamp, @message, @logStream | sort @timestamp desc | limit 10000`, - startTime: start.toMillis() / 1000, - endTime: end.toMillis() / 1000, - }), - ) - .catch((ex) => {}); - if (!result) return true; - console.log("created query", result.queryId); - - while (true) { - const response = await client.send( - new GetQueryResultsCommand({ - queryId: result.queryId, - }), - ); - - if (response.status === "Complete") { - const results = response.results || []; - console.log("log insights results", results.length); - - let index = 0; - - async function flush() { - const data = processor.flush(-1); - console.log( - "flushing invocations", - data.length, - "flushed so far", - flushed, - ); - if (data.length) { - flushed += data.length; - const url = await Storage.putEphemeral(JSON.stringify(data), { - ContentType: "application/json", - }); - await Realtime.publish("invocation.url", url, profileID); - } - } - - let now = Date.now(); - for (const result of results.sort((a, b) => - a[0]!.value!.localeCompare(b[0]!.value!), - )) { - await processor.process({ - id: index.toString(), - timestamp: new Date(result[0]?.value! 
+ " Z").getTime(), - streamName: result[2]?.value!, - line: result[1]?.value!, - }); - if (Date.now() - now > 10_000 && processor.ready) { - console.log("taking too long, flushing"); - await flush(); - if (flushed >= 50) return false; - now = Date.now(); - } - index++; - } - await flush(); - if (flushed >= 50) { - return false; - } - - break; - } - - await new Promise((resolve) => setTimeout(resolve, 200)); - } - - iteration++; - end = start; - start = start.minus({ millisecond: delay(iteration) }); - } - })(); - - await Log.Search.complete({ - id: search.id, - outcome: result ? "completed" : "partial", - }); - await Replicache.poke(profileID); - }), -); - -function delay(iteration: number) { - const hours = Math.pow(2, iteration) - 1; - return hours * 60 * 60 * 1000; -} diff --git a/packages/functions/src/events/resource-updated.ts b/packages/functions/src/events/resource-updated.ts deleted file mode 100644 index 678da9ab..00000000 --- a/packages/functions/src/events/resource-updated.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Resource } from "@console/core/app/resource"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Resource.Events.Updated, (evt) => - withActor(evt.metadata.actor, async () => {}) -); diff --git a/packages/functions/src/events/run-alert.ts b/packages/functions/src/events/run-alert.ts deleted file mode 100644 index df517b2b..00000000 --- a/packages/functions/src/events/run-alert.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Run } from "@console/core/run"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler( - [Run.Event.Completed, Run.Event.CreateFailed], - (evt) => withActor(evt.metadata.actor, () => Run.alert(evt.properties.runID)) -); diff --git a/packages/functions/src/events/run-completed.ts b/packages/functions/src/events/run-completed.ts deleted file mode 100644 
index 07124f3f..00000000 --- a/packages/functions/src/events/run-completed.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Run } from "@console/core/run"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Run.Event.Completed, (evt) => - withActor(evt.metadata.actor, () => Run.orchestrate(evt.properties.stageID)) -); diff --git a/packages/functions/src/events/run-created.ts b/packages/functions/src/events/run-created.ts deleted file mode 100644 index 651ed131..00000000 --- a/packages/functions/src/events/run-created.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Run } from "@console/core/run"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Run.Event.Created, (evt) => - withActor(evt.metadata.actor, () => Run.orchestrate(evt.properties.stageID)) -); diff --git a/packages/functions/src/events/runner-completed.ts b/packages/functions/src/events/runner-completed.ts deleted file mode 100644 index 1f4872c8..00000000 --- a/packages/functions/src/events/runner-completed.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Run } from "@console/core/run"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Run.Event.RunnerCompleted, async (evt) => { - const { workspaceID, runID, error } = evt.properties; - await withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - () => Run.complete({ runID, error }) - ); -}); diff --git a/packages/functions/src/events/runner-started.ts b/packages/functions/src/events/runner-started.ts deleted file mode 100644 index e141c023..00000000 --- a/packages/functions/src/events/runner-started.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Run } from "@console/core/run"; -import { EventHandler } from "sst/node/event-bus"; - -export 
const handler = EventHandler(Run.Event.RunnerStarted, async (evt) => { - const { - workspaceID, - engine, - runID, - awsRequestId, - logGroup, - logStream, - timestamp, - } = evt.properties; - await withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - () => - Run.markRunStarted({ - engine, - runID, - awsRequestId, - logGroup, - logStream, - timestamp, - }) - ); -}); diff --git a/packages/functions/src/events/runner-updated-external.ts b/packages/functions/src/events/runner-updated-external.ts new file mode 100644 index 00000000..cbc20547 --- /dev/null +++ b/packages/functions/src/events/runner-updated-external.ts @@ -0,0 +1,93 @@ +import { withActor } from "@console/core/actor"; +import { Run } from "@console/core/run"; +import { bus } from "sst/aws/bus"; + +export const handler = bus.subscriber( + [Run.Event.RunnerStarted, Run.Event.RunnerCompleted], + async (evt) => { + const { workspaceID } = evt.properties; + await withActor( + { + type: "system", + properties: { workspaceID }, + }, + async () => { + console.log(evt.type); + console.log(evt); + switch (evt.type) { + case Run.Event.RunnerStarted.type: + await Run.markRunStarted({ + engine: evt.properties.engine, + runID: evt.properties.runID, + awsRequestId: evt.properties.awsRequestId, + logGroup: evt.properties.logGroup, + logStream: evt.properties.logStream, + timestamp: evt.properties.timestamp, + }); + break; + case Run.Event.RunnerCompleted.type: + await Run.complete({ + runID: evt.properties.runID, + error: evt.properties.error, + }); + break; + } + } + ); + } +); + +interface Events { + "CodeBuild Build State Change": { + "build-status": string; + "project-name": string; + "additional-information": { + environment: { + "environment-variables": { + name: string; + value: string; + }[]; + }; + }; + }; +} + +type Payload = { + [key in keyof Events]: { + account: string; + region: string; + "detail-type": key; + detail: Events[key]; + }; +}[keyof Events]; + +export const 
codebuildHandler = async (evt: Payload) => { + console.log(evt); + + if (!evt.detail["project-name"].startsWith("sst-runner-")) return; + + const runnerEnv = evt.detail["additional-information"]["environment"][ + "environment-variables" + ].find((v) => v.name === "SST_RUNNER_EVENT"); + if (!runnerEnv) return; + + const runnerEvent = JSON.parse(runnerEnv.value); + await withActor( + { + type: "system", + properties: { workspaceID: runnerEvent.workspaceID }, + }, + async () => { + const status = evt.detail["build-status"]; + await Run.complete({ + runID: runnerEvent.runID, + error: + status === "TIMED_OUT" + ? "CodeBuild run timed out" + : status === "STOPPED" + ? "CodeBuild run stopped" + : "CodeBuild run failed", + }); + } + ); +}; diff --git a/packages/functions/src/events/stack-updated-external.ts b/packages/functions/src/events/stack-updated-external.ts index fe4a419b..f204660d 100644 --- a/packages/functions/src/events/stack-updated-external.ts +++ b/packages/functions/src/events/stack-updated-external.ts @@ -1,5 +1,4 @@ import { withActor } from "@console/core/actor"; -import { App, Stage } from "@console/core/app"; import { app, stage } from "@console/core/app/app.sql"; import { awsAccount } from "@console/core/aws/aws.sql"; import { State } from "@console/core/state"; @@ -10,6 +9,8 @@ import { useTransaction, } from "@console/core/util/transaction"; import { and, eq } from "drizzle-orm"; +import { Resource } from "sst"; +import { bus } from "sst/aws/bus"; interface Events { "Object Created": { @@ -33,13 +34,127 @@ type Payload = { export const handler = async (evt: Payload) => { console.log(evt); - if (evt.detail.object.key.startsWith("lock")) { + const region = evt.region; + + if ( + evt.detail.object.key.startsWith("update/") && + evt["detail-type"] === "Object Created" + ) { + let [, appHint, stageHint] = evt.detail.object.key.split("/"); + [stageHint] = stageHint!.split("."); + await useTransaction(async (tx) => { + const stages = await findStages( + 
stageHint!, + appHint!, + evt.account, + region, + ); + for (const row of stages) { + await withActor( + { + type: "system", + properties: { + workspaceID: row.workspaceID, + }, + }, + () => + createTransaction(async () => { + if (!row.appID) { + row.appID = createId(); + await tx.insert(app).values({ + workspaceID: row.workspaceID, + name: appHint!, + id: row.appID, + }); + } + if (!row.stageID) { + row.stageID = createId(); + await tx.insert(stage).values({ + appID: row.appID!, + name: stageHint!, + id: row.stageID, + region: evt.region, + workspaceID: row.workspaceID, + awsAccountID: row.id, + }); + } + await createTransactionEffect(() => + bus.publish(Resource.Bus, State.Event.UpdateCreated, { + stageID: row.stageID!, + updateID: evt.detail.object.key + .split("/") + .at(-1)! + .split(".")[0]!, + }), + ); + }), + ); + } + }); + } + + if ( + evt["detail-type"] === "Object Created" && + evt.detail.object.key.startsWith("snapshot/") + ) { + let [, appHint, stageHint, updateID] = evt.detail.object.key.split("/"); + updateID = updateID!.split(".")[0]!; + const stages = await findStages(stageHint!, appHint!, evt.account, region); + for (const row of stages) { + await withActor( + { + type: "system", + properties: { + workspaceID: row.workspaceID, + }, + }, + () => + bus.publish(Resource.Bus, State.Event.SnapshotCreated, { + stageID: row.stageID!, + updateID, + }), + ); + } + return; + } + + if ( + evt["detail-type"] === "Object Created" && + evt.detail.object.key.startsWith("app/") + ) { + let [, appHint, stageHint] = evt.detail.object.key.split("/"); + stageHint = stageHint!.split(".")[0]; + const stages = await findStages(stageHint!, appHint!, evt.account, region); + for (const row of stages) { + await withActor( + { + type: "system", + properties: { + workspaceID: row.workspaceID, + }, + }, + () => + bus.publish(Resource.Bus, State.Event.StateUpdated, { + stageID: row.stageID!, + }), + ); + } + return; + } + + // this is legacy now :( + if 
(evt.detail.object.key.startsWith("lock/")) { if (evt["detail-type"] === "Object Created") { console.log("lock created"); await useTransaction(async (tx) => { let [, appHint, stageHint] = evt.detail.object.key.split("/"); [stageHint] = stageHint!.split("."); - const stages = await findStages(stageHint!, appHint!, evt.account); + const stages = await findStages( + stageHint!, + appHint!, + evt.account, + region, + ); for (const row of stages) { await withActor( { @@ -72,7 +187,7 @@ export const handler = async (evt: Payload) => { } console.log("lock created for", row); await createTransactionEffect(() => - State.Event.LockCreated.publish({ + bus.publish(Resource.Bus, State.Event.LockCreated, { stageID: row.stageID!, // @ts-expect-error versionID: evt.detail.object["version-id"]!, @@ -86,10 +201,11 @@ export const handler = async (evt: Payload) => { } } - if (evt.detail.object.key.startsWith("summary")) { + // this is legacy now :( + if (evt.detail.object.key.startsWith("summary/")) { let [, appHint, stageHint, updateID] = evt.detail.object.key.split("/"); updateID = updateID!.split(".")[0]; - const stages = await findStages(stageHint!, appHint!, evt.account); + const stages = await findStages(stageHint!, appHint!, evt.account, region); for (const row of stages) { await withActor( { @@ -99,7 +215,7 @@ export const handler = async (evt: Payload) => { }, }, () => - State.Event.SummaryCreated.publish({ + bus.publish(Resource.Bus, State.Event.SummaryCreated, { stageID: row.stageID!, updateID: updateID!, }), @@ -108,13 +224,14 @@ export const handler = async (evt: Payload) => { return; } + // this is legacy now :( if ( evt["detail-type"] === "Object Created" && - evt.detail.object.key.startsWith("history") + evt.detail.object.key.startsWith("history/") ) { let [, appHint, stageHint, updateID] = evt.detail.object.key.split("/"); updateID = updateID!.split(".")[0]; - const stages = await findStages(stageHint!, appHint!, evt.account); + const stages = await 
findStages(stageHint!, appHint!, evt.account, region); for (const row of stages) { await withActor( { @@ -124,7 +241,7 @@ export const handler = async (evt: Payload) => { }, }, () => - State.Event.HistoryCreated.publish({ + bus.publish(Resource.Bus, State.Event.HistoryCreated, { stageID: row.stageID!, key: evt.detail.object.key, }), @@ -133,11 +250,12 @@ export const handler = async (evt: Payload) => { return; } + // this is v2 if ( evt["detail-type"] === "Object Created" || evt["detail-type"] === "Object Deleted" ) { - if (!evt.detail.object.key.startsWith("stackMetadata")) { + if (!evt.detail.object.key.startsWith("stackMetadata/")) { console.log("skipping", evt.detail.object.key); return; } @@ -152,9 +270,7 @@ export const handler = async (evt: Payload) => { : appHint; const { account, region } = evt; console.log("processing", appName, stageName, account, region); - - const rows = await findStages(stageName!, appName!, account); - + const rows = await findStages(stageName!, appName!, account, region); for (const row of rows) { await withActor( { @@ -164,38 +280,43 @@ export const handler = async (evt: Payload) => { }, }, () => - createTransaction(async () => { - if (row.stageID) { - console.log("creating effect"); - await createTransactionEffect(() => - Stage.Events.Updated.publish({ - stageID: row.stageID!, - }), - ); - return; - } - - let appID = row.appID; - if (!appID) { - appID = await App.create({ + createTransaction(async (tx) => { + if (!row.appID) { + row.appID = createId(); + await tx.insert(app).values({ + workspaceID: row.workspaceID, name: appName!, + id: row.appID, }); } - - await App.Stage.connect({ - appID, - region, - name: stageName!, - awsAccountID: row.id, - }); + if (!row.stageID) { + row.stageID = createId(); + await tx.insert(stage).values({ + appID: row.appID!, + name: stageName!, + id: row.stageID, + region: evt.region, + workspaceID: row.workspaceID, + awsAccountID: row.id, + }); + } + await createTransactionEffect(() => + 
bus.publish(Resource.Bus, State.Event.StateUpdated, { + stageID: row.stageID!, + }), + ); }), ); - console.log("done", row); } } }; -async function findStages(stageName: string, appName: string, account: string) { +async function findStages( + stageName: string, + appName: string, + account: string, + region: string, +) { const rows = await useTransaction((tx) => { return tx .select({ @@ -212,7 +333,14 @@ async function findStages(stageName: string, appName: string, account: string) { eq(app.workspaceID, awsAccount.workspaceID), ), ) - .leftJoin(stage, and(eq(stage.name, stageName!), eq(stage.appID, app.id))) + .leftJoin( + stage, + and( + eq(stage.name, stageName!), + eq(stage.appID, app.id), + eq(stage.region, region), + ), + ) .where(and(eq(awsAccount.accountID, account))) .execute(); }); diff --git a/packages/functions/src/events/state-history-created.ts b/packages/functions/src/events/state-history-created.ts deleted file mode 100644 index f1ba095e..00000000 --- a/packages/functions/src/events/state-history-created.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { State } from "@console/core/state"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(State.Event.HistoryCreated, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await State.receiveHistory({ - key: evt.properties.key, - config, - }); - }) -); diff --git a/packages/functions/src/events/state-history-synced.ts b/packages/functions/src/events/state-history-synced.ts deleted file mode 100644 index f435ab8d..00000000 --- a/packages/functions/src/events/state-history-synced.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { Issue } from "@console/core/issue"; -import { State } from 
"@console/core/state"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(State.Event.HistorySynced, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await Issue.subscribeIon(config); - }), -); diff --git a/packages/functions/src/events/state-lock-created.ts b/packages/functions/src/events/state-lock-created.ts deleted file mode 100644 index c350d03c..00000000 --- a/packages/functions/src/events/state-lock-created.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { State } from "@console/core/state"; -import { createTransaction } from "@console/core/util/transaction"; -import { DateTime } from "luxon"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(State.Event.LockCreated, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await State.receiveLock({ - versionID: evt.properties.versionID, - config, - }); - }) -); diff --git a/packages/functions/src/events/state-summary-created.ts b/packages/functions/src/events/state-summary-created.ts deleted file mode 100644 index bd27a198..00000000 --- a/packages/functions/src/events/state-summary-created.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { State } from "@console/core/state"; -import { createTransaction } from "@console/core/util/transaction"; -import { DateTime } from "luxon"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(State.Event.SummaryCreated, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await State.receiveSummary({ - updateID: 
evt.properties.updateID, - config, - }); - }) -); diff --git a/packages/functions/src/events/user-created.ts b/packages/functions/src/events/user-created.ts deleted file mode 100644 index 0f331764..00000000 --- a/packages/functions/src/events/user-created.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { User } from "@console/core/user"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(User.Events.UserCreated, async (event) => - withActor(event.metadata.actor, () => - User.sendEmailInvite(event.properties.userID) - ) -); diff --git a/packages/functions/src/events/workspace-created.ts b/packages/functions/src/events/workspace-created.ts deleted file mode 100644 index 84d847eb..00000000 --- a/packages/functions/src/events/workspace-created.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { Workspace } from "@console/core/workspace"; -import { stripe } from "@console/core/stripe"; -import { EventHandler } from "sst/node/event-bus"; -import { withActor } from "@console/core/actor"; -import { Billing } from "@console/core/billing"; -import { Alert } from "@console/core/alert"; - -export const handler = EventHandler(Workspace.Events.Created, async (evt) => - withActor( - { - type: "system", - properties: { workspaceID: evt.properties.workspaceID }, - }, - async () => { - await Alert.put({ - source: { app: "*", stage: "*" }, - destination: { - type: "email", - properties: { users: "*" }, - }, - event: "issue", - }); - await Alert.put({ - source: { app: "*", stage: "*" }, - destination: { - type: "email", - properties: { users: "*" }, - }, - event: "autodeploy", - }); - - const subscription = await Billing.Stripe.get(); - if (subscription?.customerID) { - console.log("Already has stripe customer ID"); - return; - } - - const customer = await stripe.customers.create({ - //email: evt.properties.email, - metadata: { - workspaceID: evt.properties.workspaceID, - }, - }); - - await 
Billing.Stripe.setCustomerID(customer.id); - } - ) -); diff --git a/packages/functions/src/github/connect.ts b/packages/functions/src/github/connect.ts deleted file mode 100644 index 40914fc5..00000000 --- a/packages/functions/src/github/connect.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Config } from "sst/node/config"; -import { ApiHandler } from "sst/node/api"; - -export const handler = ApiHandler(async (event) => { - const workspaceID = event.queryStringParameters?.workspaceID; - const appName = - Config.STAGE === "production" - ? "sst-console" - : `sst-console-${Config.STAGE}`; - return { - statusCode: 302, - headers: { - location: `https://github.com/apps/${appName}/installations/new?state=${workspaceID}`, - }, - }; -}); diff --git a/packages/functions/src/github/installed.ts b/packages/functions/src/github/installed.ts deleted file mode 100644 index 0d3b0c61..00000000 --- a/packages/functions/src/github/installed.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { ApiHandler, Response } from "sst/node/api"; -import { Github } from "@console/core/git/github"; -import { withActor } from "@console/core/actor"; - -export const handler = ApiHandler(async (event) => { - const workspaceID = event.queryStringParameters?.state; - const installationID = parseInt( - event.queryStringParameters?.installation_id ?? 
"", - ); - if (!installationID) - throw new Response({ - statusCode: 401, - body: "Unauthorized", - }); - - // User has authorized the app - if (workspaceID) { - await withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - async () => { - await Github.connect(installationID); - }, - ); - } - - // No workspaceID when the installation is updated from GitHub console - if (!workspaceID) { - await withActor({ type: "public", properties: {} }, async () => { - await Github.Events.Installed.publish({ installationID }); - }); - } - - return { - statusCode: 200, - headers: { - "content-type": "text/html", - }, - body: ` - - - `, - }; -}); diff --git a/packages/functions/src/index.ts b/packages/functions/src/index.ts deleted file mode 100644 index b3a491c3..00000000 --- a/packages/functions/src/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { APIGatewayProxyHandlerV2 } from "aws-lambda"; - -export const handler: APIGatewayProxyHandlerV2 = async (event) => { - return { - statusCode: 200, - body: "Hello foo foo foo", - }; -}; diff --git a/packages/functions/src/issues/issue-detected.tsx b/packages/functions/src/issues/issue-detected.tsx deleted file mode 100644 index 7deadd4c..00000000 --- a/packages/functions/src/issues/issue-detected.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { SQSClient, SendMessageCommand } from "@aws-sdk/client-sqs"; -import { withActor } from "@console/core/actor"; -import { Issue } from "@console/core/issue"; -import { createId } from "@console/core/util/sql"; -import { Handler } from "sst/context"; -import { EventHandler } from "sst/node/event-bus"; -import { Queue } from "sst/node/queue"; - -const sqs = new SQSClient({}); -export const handler = EventHandler(Issue.Events.IssueDetected, async (event) => - withActor(event.metadata.actor, async () => { - console.log( - await Promise.all([ - sqs.send( - new SendMessageCommand({ - QueueUrl: Queue["issue-detected-queue"].queueUrl, - MessageDeduplicationId: createId(), - MessageBody: 
JSON.stringify(event), - MessageGroupId: [ - event.properties.group, - event.properties.stageID, - ].join("-"), - }) - ), - // Issue.expand(event.properties), - ]) - ); - }) -); - -export const queue = Handler("sqs", async (event) => { - console.log("got", event.Records.length, "records"); - for (const record of event.Records) { - const evt = JSON.parse(record.body); - await withActor(evt.metadata.actor, async () => { - await Issue.Send.triggerIssue(evt.properties); - }); - } -}); diff --git a/packages/functions/src/issues/rate-limited.ts b/packages/functions/src/issues/rate-limited.ts deleted file mode 100644 index 11d1b71f..00000000 --- a/packages/functions/src/issues/rate-limited.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { Stage } from "@console/core/app"; -import { Issue } from "@console/core/issue"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(Issue.Events.RateLimited, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await Issue.disableLogGroup({ - logGroup: evt.properties.logGroup, - config, - }); - }) -); diff --git a/packages/functions/src/issues/resources-updated.ts b/packages/functions/src/issues/resources-updated.ts deleted file mode 100644 index 6949d211..00000000 --- a/packages/functions/src/issues/resources-updated.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { App, Stage } from "@console/core/app"; -import { Issue } from "@console/core/issue"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(App.Stage.Events.ResourcesUpdated, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) { - console.log("no config"); - return; - } - await Issue.subscribe(config); - }) -); diff --git 
a/packages/functions/src/issues/stage-connected.ts b/packages/functions/src/issues/stage-connected.ts deleted file mode 100644 index 847d11ee..00000000 --- a/packages/functions/src/issues/stage-connected.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { App, Stage } from "@console/core/app"; -import { Issue } from "@console/core/issue"; -import { EventHandler } from "sst/node/event-bus"; - -export const handler = EventHandler(App.Stage.Events.Connected, (evt) => - withActor(evt.metadata.actor, async () => { - const config = await Stage.assumeRole(evt.properties.stageID); - if (!config) return; - await Issue.connectStage(config); - }) -); diff --git a/packages/functions/src/issues/subscriber.ts b/packages/functions/src/issues/subscriber.ts index 5534b0f6..eea2a289 100644 --- a/packages/functions/src/issues/subscriber.ts +++ b/packages/functions/src/issues/subscriber.ts @@ -1,20 +1,10 @@ import { Issue } from "@console/core/issue"; import { unzipSync } from "zlib"; import { withActor } from "@console/core/actor"; -import { Handler } from "sst/context"; -import { KinesisStreamEvent, KinesisStreamBatchResponse } from "aws-lambda"; +import { KinesisStreamEvent } from "aws-lambda"; import { queue } from "@console/core/util/queue"; -declare module "sst/context" { - interface Handlers { - kinesis_stream: { - event: KinesisStreamEvent; - response: KinesisStreamBatchResponse; - }; - } -} - -export const handler = Handler("kinesis_stream", (event) => +export const handler = async (event: KinesisStreamEvent) => withActor( { type: "public", @@ -72,5 +62,4 @@ export const handler = Handler("kinesis_stream", (event) => return response; }, - ), -); + ); diff --git a/packages/functions/src/log/expand.ts b/packages/functions/src/log/expand.ts deleted file mode 100644 index 1de71948..00000000 --- a/packages/functions/src/log/expand.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { Stage } from "@console/core/app"; -import { Resource } from 
"@console/core/app/resource"; -import { Issue } from "@console/core/issue"; -import { Log } from "@console/core/log"; -import { Replicache } from "@console/core/replicache"; -import { withApiAuth } from "src/api"; -import { ApiHandler, Response, useQueryParam } from "sst/node/api"; - -export const handler = ApiHandler( - withApiAuth(async () => { - const pointer = JSON.parse(useQueryParam("pointer")!); - const stageID = useQueryParam("stageID")!; - const groupID = useQueryParam("groupID")!; - - const config = await Stage.assumeRole(stageID); - if (!config) - throw new Response({ - statusCode: 400, - }); - - if (groupID.length !== 64) { - const result = await Log.expand({ - group: groupID, - logGroup: pointer.logGroup, - logStream: pointer.logStream, - timestamp: pointer.timestamp, - config, - }); - return { - statusCode: 200, - body: JSON.stringify(result), - headers: { - "Content-Type": "application/json", - }, - }; - } - - await Issue.expand({ - group: groupID, - stageID, - }); - await Replicache.poke(); - return { - statusCode: 200, - headers: { - "Content-Type": "application/json", - }, - }; - }) -); diff --git a/packages/functions/src/poller/fetch.ts b/packages/functions/src/poller/fetch.ts deleted file mode 100644 index f5d6623c..00000000 --- a/packages/functions/src/poller/fetch.ts +++ /dev/null @@ -1,170 +0,0 @@ -import { - CloudWatchLogsClient, - DescribeLogStreamsCommand, - FilterLogEventsCommand, - GetLogEventsCommand, -} from "@aws-sdk/client-cloudwatch-logs"; -import { S3Client } from "@aws-sdk/client-s3"; -import { withActor } from "@console/core/actor"; -import { App, Stage } from "@console/core/app"; -import { AWS } from "@console/core/aws"; -import { Log } from "@console/core/log"; -import { LogPoller } from "@console/core/log/poller"; -import { Realtime } from "@console/core/realtime"; -import { Storage } from "@console/core/storage"; - -interface State { - pollerID: string; - workspaceID: string; - stageID: string; - logGroup: string; - status?: 
{ - offset: number; - start: number; - done: boolean; - attempts: number; - }; -} - -export function handler(input: State) { - return withActor( - { - type: "system", - properties: { - workspaceID: input.workspaceID, - }, - }, - async () => { - const attempts = input.status?.attempts || 0; - let start = input.status?.start; - const offset = input.status?.offset || -30 * 1000; - if (attempts === 100) { - return { - done: true, - }; - } - const poller = await LogPoller.fromID(input.pollerID); - if (!poller) { - throw new Error(`No poller found for ${input.pollerID}`); - } - const config = await Stage.assumeRole(poller.stageID); - if (!config) - return { - done: true, - }; - const client = new CloudWatchLogsClient(config); - const sourcemapKey = - `arn:aws:lambda:${config.region}:${config.awsAccountID}:function:` + - input.logGroup.split("/").slice(3, 5).join("/"); - - async function* fetchStreams(logGroup: string) { - let nextToken: string | undefined; - console.log("fetching streams for", logGroup); - - while (true) { - try { - const response = await client.send( - new DescribeLogStreamsCommand({ - logGroupIdentifier: logGroup, - nextToken: nextToken, - orderBy: "LastEventTime", - descending: true, - }) - ); - - for (const logStream of response.logStreams || []) { - yield logStream; - } - - nextToken = response.nextToken; - if (!nextToken) { - break; - } - } catch (e) { - break; - } - } - } - - async function* fetchEvents( - logGroup: string, - startTime: number, - streams: string[] - ) { - let nextToken: string | undefined; - console.log("fetching logs for", streams.length, "streams"); - - while (true) { - const response = await client.send( - new FilterLogEventsCommand({ - logGroupIdentifier: logGroup, - logStreamNames: streams, - nextToken, - startTime, - }) - ); - - for (const event of response.events || []) { - yield event; - } - - nextToken = response.nextToken; - if (!nextToken) { - break; - } - } - } - - console.log("running loop", attempts); - - const 
streams: string[] = []; - - for await (const stream of fetchStreams(input.logGroup)) { - streams.push(stream.logStreamName || ""); - if (streams.length === 100) break; - } - if (!streams.length) - return { - ...input.status, - done: false, - }; - if (!start) start = Date.now() - 2 * 60 * 1000; - - console.log("fetching since", new Date(start + offset).toLocaleString()); - const processor = Log.createProcessor({ - sourcemapKey, - group: input.logGroup + "-tail", - config, - }); - - for await (const event of fetchEvents( - input.logGroup, - start + offset, - streams - )) { - await processor.process({ - timestamp: event.timestamp!, - line: event.message!, - streamName: event.logStreamName!, - id: event.eventId!, - }); - } - - const data = processor.flush(); - if (data.length) { - console.log("sending", data.length, "invocations"); - const url = await Storage.putEphemeral(JSON.stringify(data), { - ContentType: "application/json", - }); - await Realtime.publish("invocation.url", url); - } - - return { - attempts: attempts + 1, - offset: Date.now() - start - 30 * 1000, - start, - done: false, - } satisfies State["status"]; - } - ); -} diff --git a/packages/functions/src/replicache/dummy/data.ts b/packages/functions/src/replicache/dummy/data.ts index 755b7aba..dcedc093 100644 --- a/packages/functions/src/replicache/dummy/data.ts +++ b/packages/functions/src/replicache/dummy/data.ts @@ -53,7 +53,7 @@ export type DummyMode = | "overview:base;usage:overage;resources:base;workspace:gated;subscription:overdue"; export function* generateData( - mode: DummyMode + mode: DummyMode, ): Generator { console.log("generating for", mode); @@ -629,7 +629,7 @@ function* appsFull(): Generator { appID: APP_ID_LONG, status: "error", error: unknownRunError( - "Areallylongerrormessagethatshouldoverflowbecauseitstoolonganditkeepsongoingandgoinganditshouldoverflowthecontaineritsbeingheldinside" + 
"Areallylongerrormessagethatshouldoverflowbecauseitstoolonganditkeepsongoingandgoinganditshouldoverflowthecontaineritsbeingheldinside", ), branch: "main", commitID: "11b2661dab38cb264be29b7d1b552802bcca32ce", @@ -795,7 +795,7 @@ function* stageIonBase(): Generator { } function* stageIonUpdateLinks( - parent: string + parent: string, ): Generator { const UPDATE_1 = 1; const UPDATE_2 = 2; @@ -898,7 +898,7 @@ function* stageIonUpdateLinks( } function* stageIonBaseIconCase( - parent: string + parent: string, ): Generator { yield stateResource({ parent, @@ -3333,6 +3333,7 @@ function func({ missingSourcemap, }, enrichment: { + logGroup: "", size: size || 2048, live: live || false, // @ts-ignore @@ -3426,18 +3427,18 @@ function invocation({ duration === undefined ? duration : { - duration, - memory: 128, - size: 2048, - xray: "eb1e33e8a81b697b75855af6bfcdbcbf7cbb", - }, + duration, + memory: 128, + size: 2048, + xray: "eb1e33e8a81b697b75855af6bfcdbcbf7cbb", + }, start: startTime.valueOf(), logs: messages ? messages.map((message, i) => ({ - message, - id: `log-${INVOCATION_COUNT}-${i}`, - timestamp: startTime.plus({ seconds: 20 * i }).toMillis(), - })) + message, + id: `log-${INVOCATION_COUNT}-${i}`, + timestamp: startTime.plus({ seconds: 20 * i }).toMillis(), + })) : [], }; } diff --git a/packages/functions/src/replicache/dummy/pull.ts b/packages/functions/src/replicache/dummy/pull.ts index f54a807c..fb510ba5 100644 --- a/packages/functions/src/replicache/dummy/pull.ts +++ b/packages/functions/src/replicache/dummy/pull.ts @@ -1,3 +1,4 @@ +// @ts-nocheck import { withActor, useActor } from "@console/core/actor"; import { NotPublic } from "../../api"; import { @@ -20,12 +21,12 @@ export const handler = ApiHandler(async () => { !workspaceID ? 
session : { - type: "user", - properties: { - workspaceID, - userID: "dummy-user", + type: "user", + properties: { + workspaceID, + userID: "dummy-user", + }, }, - }, () => { const actor = useActor(); NotPublic(); diff --git a/packages/functions/src/replicache/framework.ts b/packages/functions/src/replicache/framework.ts index b88aa302..29c13a13 100644 --- a/packages/functions/src/replicache/framework.ts +++ b/packages/functions/src/replicache/framework.ts @@ -1,4 +1,4 @@ -import { z, ZodAny, ZodObject, ZodRawShape, ZodSchema } from "zod"; +import { z, ZodSchema } from "zod"; import { WriteTransaction } from "replicache"; interface Mutation { @@ -18,11 +18,11 @@ export class Server { public mutation< Name extends string, Shape extends ZodSchema, - Args = z.infer + Args = z.infer, >( name: Name, shape: Shape, - fn: (input: z.infer) => Promise + fn: (input: z.infer) => Promise, ): Server }> { this.mutations.set(name as string, { fn: async (args) => { @@ -37,12 +37,12 @@ export class Server { public expose< Name extends string, Shape extends ZodSchema, - Args = z.infer + Args = z.infer, >( name: Name, fn: ((input: z.infer) => Promise) & { schema: Shape; - } + }, ): Server }> { this.mutations.set(name as string, { fn, @@ -58,19 +58,21 @@ export class Server { } } -type ExtractMutations> = S extends Server - ? M - : never; +type ExtractMutations> = + S extends Server ? 
M : never; export class Client< S extends Server, - Mutations extends Record = ExtractMutations + Mutations extends Record = ExtractMutations, > { private mutations = new Map Promise>(); public mutation( name: Name, - fn: (tx: WriteTransaction, input: Mutations[Name]["input"]) => Promise + fn: ( + tx: WriteTransaction, + input: Mutations[Name]["input"], + ) => Promise, ) { this.mutations.set(name as string, fn); return this; @@ -79,7 +81,7 @@ export class Client< public build(): { [key in keyof Mutations]: ( ctx: WriteTransaction, - args: Mutations[key]["input"] + args: Mutations[key]["input"], ) => Promise; } { return Object.fromEntries(this.mutations.entries()) as any; diff --git a/packages/functions/src/replicache/pull.ts b/packages/functions/src/replicache/pull.ts deleted file mode 100644 index 1f247d27..00000000 --- a/packages/functions/src/replicache/pull.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ApiHandler } from "sst/node/api"; -import { PullResponseV0 } from "replicache"; -import { NotPublic, withApiAuth } from "../api"; - -export const handler = ApiHandler( - withApiAuth(async () => { - NotPublic(); - - const response: PullResponseV0 = { - error: "VersionNotSupported", - }; - - return { - statusCode: 200, - body: JSON.stringify(response), - headers: { - "Content-Type": "application/json", - }, - }; - }) -); diff --git a/packages/functions/src/replicache/pull1.ts b/packages/functions/src/replicache/pull1.ts deleted file mode 100644 index d953db42..00000000 --- a/packages/functions/src/replicache/pull1.ts +++ /dev/null @@ -1,565 +0,0 @@ -import { DateTime } from "luxon"; -import { useActor, useWorkspace } from "@console/core/actor"; -import { user } from "@console/core/user/user.sql"; -import { TxOrDb, createTransaction } from "@console/core/util/transaction"; -import { NotPublic, withApiAuth } from "../api"; -import { ApiHandler, Response, useHeader, useJsonBody } from "sst/node/api"; -import { - eq, - and, - gt, - gte, - inArray, - isNull, - SQLWrapper, 
- sql, - SQL, - lt, - lte, -} from "drizzle-orm"; -import { workspace } from "@console/core/workspace/workspace.sql"; -import { stripe, usage } from "@console/core/billing/billing.sql"; -import { app, appRepoTable, resource, stage } from "@console/core/app/app.sql"; -import { awsAccount } from "@console/core/aws/aws.sql"; -import { - replicache_client, - replicache_client_group, -} from "@console/core/replicache/replicache.sql"; -import { lambdaPayload } from "@console/core/lambda/lambda.sql"; -import { chunk, equals, mapValues } from "remeda"; -import { log_poller, log_search } from "@console/core/log/log.sql"; -import { PatchOperation, PullRequest, PullResponseV1 } from "replicache"; -import { warning } from "@console/core/warning/warning.sql"; -import { - issue, - issueSubscriber, - issueCount, -} from "@console/core/issue/issue.sql"; -import { MySqlColumn } from "drizzle-orm/mysql-core"; -import { - SelectedFields, - db, - getTableColumns, - isNotNull, - notInArray, -} from "@console/core/drizzle"; -import { githubOrgTable, githubRepoTable } from "@console/core/git/git.sql"; -import { slackTeam } from "@console/core/slack/slack.sql"; -import { APIGatewayProxyStructuredResultV2 } from "aws-lambda"; -import { gzipSync } from "zlib"; -import { - stateEventTable, - stateResourceTable, - stateUpdateTable, -} from "@console/core/state/state.sql"; -import { State } from "@console/core/state"; -import { runConfigTable, runTable } from "@console/core/run/run.sql"; -import { Run } from "@console/core/run"; -import { Replicache } from "@console/core/replicache"; -import { AppRepo } from "@console/core/app/repo"; -import { Github } from "@console/core/git/github"; -import { alert } from "@console/core/alert/alert.sql"; -import { Alert } from "@console/core/alert"; -import { - GetObjectCommand, - PutObjectCommand, - S3Client, -} from "@aws-sdk/client-s3"; -import { createId } from "@console/core/util/sql"; -import { Bucket } from "sst/node/bucket"; -import { getSignedUrl } 
from "@aws-sdk/s3-request-presigner"; - -export const TABLES = { - stateUpdate: stateUpdateTable, - stateResource: stateResourceTable, - stateEvent: stateEventTable, - workspace, - stripe, - user, - awsAccount, - app, - appRepo: appRepoTable, - stage, - resource, - log_poller, - log_search, - lambdaPayload, - warning, - issue, - issueCount, - alert, - githubOrg: githubOrgTable, - githubRepo: githubRepoTable, - slackTeam, - usage, - run: runTable, - runConfig: runConfigTable, -}; - -type TableName = keyof typeof TABLES; - -const TABLE_KEY = { - appRepo: [appRepoTable.appID, appRepoTable.id], - runConfig: [runConfigTable.appID, runConfigTable.id], - issue: [issue.stageID, issue.id], - resource: [resource.stageID, resource.id], - issueCount: [issueCount.group, issueCount.id], - warning: [warning.stageID, warning.type, warning.id], - usage: [usage.stageID, usage.id], - stateUpdate: [stateUpdateTable.stageID, stateUpdateTable.id], - stateResource: [stateResourceTable.stageID, stateResourceTable.id], - stateEvent: [ - stateEventTable.stageID, - stateEventTable.updateID, - stateEventTable.id, - ], - run: [runTable.stageID, runTable.id], - stripe: [], -} as { - [key in TableName]?: MySqlColumn[]; -}; - -const TABLE_SELECT = { - stateEvent: (() => { - const { inputs, outputs, ...rest } = getTableColumns(stateEventTable); - return rest; - })(), -} as { - [key in TableName]?: any; -}; - -const TABLE_PROJECTION = { - alert: (input) => Alert.serialize(input), - appRepo: (input) => AppRepo.serializeAppRepo(input), - githubOrg: (input) => Github.serializeOrg(input), - githubRepo: (input) => Github.serializeRepo(input), - stateUpdate: (input) => State.serializeUpdate(input), - stateEvent: (input) => State.serializeEvent(input), - stateResource: (input) => State.serializeResource(input), - runConfig: (input) => { - if (!input.env) return input; - for (const key of Object.keys(input.env)) { - input.env[key] = "__secret"; - } - return input; - }, - run: (input) => 
Run.serializeRun(input), -} as { - [key in TableName]?: (input: (typeof TABLES)[key]["$inferSelect"]) => any; -}; - -const s3 = new S3Client({}); - -export const handler = ApiHandler( - withApiAuth(async () => { - NotPublic(); - const actor = useActor(); - function log(...args: any[]) { - if (process.env.IS_LOCAL) return; - console.log(...args); - } - log("actor", actor); - - const req: PullRequest = useJsonBody(); - log("request", req); - if (req.pullVersion !== 1) { - throw new Response({ - statusCode: 307, - headers: { - location: "/replicache/pull", - }, - }); - } - - await db.insert(replicache_client_group).ignore().values({ - id: req.clientGroupID, - cvrVersion: 0, - actor, - clientVersion: 0, - }); - const resp = await createTransaction( - async (tx): Promise => { - const patch: PatchOperation[] = []; - - const group = await tx - .select({ - id: replicache_client_group.id, - cvrVersion: replicache_client_group.cvrVersion, - clientVersion: replicache_client_group.clientVersion, - actor: replicache_client_group.actor, - }) - .from(replicache_client_group) - .for("update") - .where(and(eq(replicache_client_group.id, req.clientGroupID))) - .execute() - .then((rows) => rows.at(0)!); - - if (!equals(group.actor, actor)) { - log("compare failed", group.actor, actor); - return; - } - - const oldCvr = await Replicache.CVR.get( - req.clientGroupID, - req.cookie as number, - ); - - const cvr = oldCvr ?? 
{ - data: {}, - clientVersion: 0, - }; - - const toPut: Record = {}; - const nextCvr = { - data: {} as Record, - version: Math.max(req.cookie as number, group.cvrVersion) + 1, - }; - - if (!oldCvr) { - patch.push({ - op: "clear", - }); - patch.push({ - op: "put", - key: "/init", - value: true, - }); - } - - const results: [ - string, - { id: string; version: string; key: string }[], - ][] = []; - - if (actor.type === "user") { - log("syncing user"); - - const deletedStages = await tx - .select({ id: stage.id }) - .from(stage) - .where( - and( - isNotNull(stage.timeDeleted), - eq(stage.workspaceID, useWorkspace()), - ), - ) - .then((rows) => rows.map((row) => row.id)); - - const updates = await tx - .select({ - id: stateUpdateTable.id, - rowNumber: - sql`ROW_NUMBER() OVER (PARTITION BY ${stateUpdateTable.stageID} ORDER BY ${stateUpdateTable.index} DESC)`.as( - "row_number", - ), - }) - .from(stateUpdateTable) - .where( - and( - eq(stateUpdateTable.workspaceID, useWorkspace()), - deletedStages.length - ? notInArray(stateUpdateTable.stageID, deletedStages) - : undefined, - ), - ) - .then((rows) => - rows - .filter((row) => parseInt(row.rowNumber) < 100) - .map((row) => row.id), - ); - - const runs = await tx - .select({ - id: runTable.id, - rowNumber: - sql`ROW_NUMBER() OVER (PARTITION BY ${runTable.stageID} ORDER BY ${runTable.timeCompleted} DESC)`.as( - "row_number", - ), - }) - .from(runTable) - .where( - and( - eq(runTable.workspaceID, useWorkspace()), - deletedStages.length - ? 
notInArray(runTable.stageID, deletedStages) - : undefined, - ), - ) - .then((rows) => - rows - .filter((row) => parseInt(row.rowNumber) < 100) - .map((row) => row.id), - ); - const tableFilters = { - log_search: eq(log_search.userID, actor.properties.userID), - usage: gte( - usage.day, - DateTime.now().toUTC().startOf("month").toSQLDate()!, - ), - issueCount: gte( - issueCount.hour, - DateTime.now() - .toUTC() - .startOf("hour") - .minus({ day: 1 }) - .toSQL({ includeOffset: false })!, - ), - issue: isNull(issue.timeDeleted), - ...(updates.length - ? { - stateEvent: inArray(stateEventTable.updateID, updates), - stateUpdate: inArray(stateUpdateTable.id, updates), - } - : {}), - stateResource: deletedStages.length - ? notInArray(stateResourceTable.stageID, deletedStages) - : undefined, - run: runs.length ? inArray(runTable.id, runs) : undefined, - } satisfies { - [key in keyof typeof TABLES]?: SQLWrapper; - }; - - const workspaceID = useWorkspace(); - - for (const [name, table] of Object.entries(TABLES)) { - const key = TABLE_KEY[name as TableName] ?? [table.id]; - const query = tx - .select({ - name: sql`${name}`, - id: table.id, - version: table.timeUpdated, - key: sql.join([ - sql`concat_ws(`, - sql.join([sql`'/'`, sql`''`, sql`${name}`, ...key], sql`, `), - sql.raw(`)`), - ]) as SQL, - }) - .from(table) - .where( - and( - eq( - "workspaceID" in table ? table.workspaceID : table.id, - workspaceID, - ), - ...(name === "stage" ? [] : [isNull(table.timeDeleted)]), - ...(name in tableFilters - ? 
[tableFilters[name as keyof typeof tableFilters]] - : []), - ), - ); - log("getting updated from", name); - const rows = await query.execute(); - results.push([name, rows as any]); - } - } - - if (actor.type === "account") { - log("syncing account"); - - const [users] = await Promise.all([ - await tx - .select({ - id: user.id, - key: sql`concat('/user/', ${user.id})`, - version: user.timeUpdated, - }) - .from(user) - .innerJoin(workspace, eq(user.workspaceID, workspace.id)) - .where( - and( - eq(user.email, actor.properties.email), - isNull(user.timeDeleted), - isNull(workspace.timeDeleted), - ), - ) - .execute(), - ]); - results.push(["user", users]); - - const workspaces = await tx - .select({ - id: workspace.id, - version: workspace.timeUpdated, - key: sql`concat('/workspace/', ${workspace.id})`, - }) - .from(workspace) - .leftJoin(user, eq(user.workspaceID, workspace.id)) - .where( - and( - eq(user.email, actor.properties.email), - isNull(user.timeDeleted), - isNull(workspace.timeDeleted), - ), - ) - .execute(); - results.push(["workspace", workspaces]); - } - - for (const [name, rows] of results) { - const arr = []; - for (const row of rows) { - const version = new Date(row.version).getTime(); - if (cvr.data[row.key] !== version) { - arr.push(row); - } - delete cvr.data[row.key]; - nextCvr.data[row.key] = version; - } - toPut[name] = arr; - } - - log( - "toPut", - mapValues(toPut, (value) => value.length), - ); - - log("toDel", cvr.data); - - // new data - for (const [name, items] of Object.entries(toPut)) { - log(name); - const ids = items.map((item) => item.id); - const keys = Object.fromEntries( - items.map((item) => [item.id, item.key]), - ); - - if (!ids.length) continue; - const table = TABLES[name as keyof typeof TABLES]; - - for (const group of chunk(ids, 200)) { - const now = Date.now(); - log(name, "fetching", group.length); - const rows = await tx - .select( - TABLE_SELECT[name as keyof typeof TABLE_SELECT] || - getTableColumns(table), - ) - 
.from(table) - .where( - and( - "workspaceID" in table && actor.type === "user" - ? eq(table.workspaceID, useWorkspace()) - : undefined, - inArray(table.id, group), - ), - ) - .execute(); - log(name, "got", rows.length, "in", Date.now() - now, "ms"); - const projection = - TABLE_PROJECTION[name as keyof typeof TABLE_PROJECTION]; - for (const row of rows) { - const key = keys[row.id]!; - patch.push({ - op: "put", - key, - value: projection ? projection(row as any) : row, - }); - } - } - } - - // remove deleted data - for (const [key] of Object.entries(cvr.data)) { - patch.push({ - op: "del", - key, - }); - } - - const clients = await tx - .select({ - id: replicache_client.id, - mutationID: replicache_client.mutationID, - clientVersion: replicache_client.clientVersion, - }) - .from(replicache_client) - .where( - and( - eq(replicache_client.clientGroupID, req.clientGroupID), - gt(replicache_client.clientVersion, cvr.clientVersion), - ), - ) - .execute(); - - const lastMutationIDChanges = Object.fromEntries( - clients.map((c) => [c.id, c.mutationID] as const), - ); - if (patch.length > 0 || Object.keys(lastMutationIDChanges).length > 0) { - log("inserting", req.clientGroupID); - await tx - .update(replicache_client_group) - .set({ - cvrVersion: nextCvr.version, - }) - .where(eq(replicache_client_group.id, req.clientGroupID)) - .execute(); - - await Replicache.CVR.put(req.clientGroupID, nextCvr.version, { - data: nextCvr.data, - clientVersion: group.clientVersion, - }); - - return { - patch, - cookie: nextCvr.version, - lastMutationIDChanges, - }; - } - - return { - patch: [], - cookie: req.cookie, - lastMutationIDChanges, - }; - }, - { - isolationLevel: "repeatable read", - }, - ); - - const response: APIGatewayProxyStructuredResultV2 = { - statusCode: 200, - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(resp), - }; - - const isGzip = useHeader("accept-encoding"); - if (isGzip) { - log("gzipping"); - response.headers!["content-encoding"] 
= "gzip"; - const buff = gzipSync(response.body || ""); - const body = buff.toString("base64"); - response.body = body; - response.isBase64Encoded = true; - if (body.length > 5 * 1024 * 1024) { - const key = - ["temporary", "daily", "pull", req.profileID].join("/") + ".json.gz"; - await s3.send( - new PutObjectCommand({ - Bucket: Bucket.storage.bucketName, - Key: key, - ContentEncoding: "gzip", - ContentType: "application/json", - Body: buff, - }), - ); - - const url = await getSignedUrl( - s3, - new GetObjectCommand({ - Bucket: Bucket.storage.bucketName, - Key: key, - }), - ); - response.body = undefined; - response.statusCode = 302; - response.headers!["location"] = url; - } - log("done gzip"); - } - - return response; - }), -); diff --git a/packages/functions/src/replicache/push.ts b/packages/functions/src/replicache/push.ts deleted file mode 100644 index 8c714d1c..00000000 --- a/packages/functions/src/replicache/push.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ApiHandler } from "sst/node/api"; -import { PushResponse } from "replicache"; -import { NotPublic, withApiAuth } from "../api"; - -export const handler = ApiHandler( - withApiAuth(async () => { - NotPublic(); - - const response: PushResponse = { - error: "VersionNotSupported", - }; - - return { - statusCode: 200, - body: JSON.stringify(response), - headers: { - "Content-Type": "application/json", - }, - }; - }) -); diff --git a/packages/functions/src/replicache/push1.ts b/packages/functions/src/replicache/push1.ts deleted file mode 100644 index 2dd9b9a4..00000000 --- a/packages/functions/src/replicache/push1.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { useActor } from "@console/core/actor"; -import { - replicache_client_group, - replicache_client, -} from "@console/core/replicache/replicache.sql"; -import { createTransaction } from "@console/core/util/transaction"; -import { and, eq } from "drizzle-orm"; -import { PushRequest } from "replicache"; -import { NotPublic, withApiAuth } from "../api"; -import { 
ApiHandler, useJsonBody } from "sst/node/api"; -import { server } from "./server"; -import { Replicache } from "@console/core/replicache"; -import { VisibleError } from "@console/core/util/error"; - -export const handler = ApiHandler( - withApiAuth(async () => { - NotPublic(); - const actor = useActor(); - - const body: PushRequest = useJsonBody(); - if (body.pushVersion !== 1) - return { - statusCode: 307, - headers: { - Location: "/replicache/push", - }, - }; - - for (const mutation of body.mutations) { - await createTransaction( - async (tx) => { - const group = await tx - .select({ - id: replicache_client_group.id, - cvrVersion: replicache_client_group.cvrVersion, - clientVersion: replicache_client_group.clientVersion, - actor: replicache_client_group.actor, - }) - .from(replicache_client_group) - .for("update") - .where(and(eq(replicache_client_group.id, body.clientGroupID))) - .execute() - .then( - (rows) => - rows.at(0) ?? { - id: body.clientGroupID, - actor: actor, - cvrVersion: 0, - clientVersion: 0, - }, - ); - - // if (!equals(group.actor, actor)) { - // throw new Error( - // `${actor} is not authorized to push to ${body.clientGroupID}}` - // ); - // } - - const client = await tx - .select({ - id: replicache_client.id, - clientGroupID: replicache_client.clientGroupID, - mutationID: replicache_client.mutationID, - clientVersion: replicache_client.clientVersion, - }) - .from(replicache_client) - .for("update") - .where(and(eq(replicache_client.id, mutation.clientID))) - .execute() - .then( - (rows) => - rows.at(0) || { - id: body.clientGroupID, - clientGroupID: body.clientGroupID, - mutationID: 0, - clientVersion: 0, - }, - ); - - const nextClientVersion = group.clientVersion + 1; - const nextMutationID = client.mutationID + 1; - - if (mutation.id < nextMutationID) { - console.log( - `Mutation ${mutation.id} has already been processed - skipping`, - ); - return; - } - - if (mutation.id > nextMutationID) { - throw new Error( - `Mutation ${mutation.id} is 
from the future - aborting`, - ); - } - - const { args, name } = mutation; - console.log("processing", mutation.id, name); - try { - await server.execute(name, args); - } catch (ex) { - if (!(ex instanceof VisibleError)) console.error(ex); - } - console.log("done processing", mutation.id, name); - - await tx - .insert(replicache_client_group) - .values({ - id: body.clientGroupID, - clientVersion: nextClientVersion, - cvrVersion: group.cvrVersion, - actor, - }) - .onDuplicateKeyUpdate({ - set: { - cvrVersion: group.cvrVersion, - clientVersion: nextClientVersion, - }, - }) - .execute(); - - await tx - .insert(replicache_client) - .values({ - id: mutation.clientID, - clientGroupID: group.id, - mutationID: nextMutationID, - clientVersion: nextClientVersion, - }) - .onDuplicateKeyUpdate({ - set: { - clientGroupID: group.id, - mutationID: nextMutationID, - clientVersion: nextClientVersion, - }, - }) - .execute(); - }, - { - isolationLevel: "repeatable read", - }, - ); - } - - if (actor.type === "user") await Replicache.poke(); - - return { - statusCode: 200, - }; - }), -); diff --git a/packages/functions/src/replicache/server.ts b/packages/functions/src/replicache/server.ts index aea13f7d..986e04c8 100644 --- a/packages/functions/src/replicache/server.ts +++ b/packages/functions/src/replicache/server.ts @@ -9,7 +9,7 @@ import { Lambda } from "@console/core/lambda"; import { assertActor, withActor, useWorkspace } from "@console/core/actor"; import { User } from "@console/core/user"; import { Issue } from "@console/core/issue"; -import { and, db, eq, or } from "@console/core/drizzle"; +import { and, eq, or } from "@console/core/drizzle"; import { useTransaction } from "@console/core/util/transaction"; import { issueSubscriber } from "@console/core/issue/issue.sql"; import { warning } from "@console/core/warning/warning.sql"; @@ -18,6 +18,8 @@ import { Slack } from "@console/core/slack"; import { AppRepo } from "@console/core/app/repo"; import { RunConfig } from 
"@console/core/run/config"; import { Alert } from "@console/core/alert"; +import { bus } from "sst/aws/bus"; +import { Resource } from "sst"; export const server = new Server() .expose("log_poller_subscribe", LogPoller.subscribe) @@ -59,8 +61,8 @@ export const server = new Server() .where( and( eq(issueSubscriber.workspaceID, useWorkspace()), - eq(issueSubscriber.stageID, input.stageID) - ) + eq(issueSubscriber.stageID, input.stageID), + ), ) .execute(); await tx @@ -71,22 +73,20 @@ export const server = new Server() eq(warning.stageID, input.stageID), or( eq(warning.type, "log_subscription"), - eq(warning.type, "issue_rate_limited") - ) - ) + eq(warning.type, "issue_rate_limited"), + ), + ), ) .execute(); }); - await Stage.Events.ResourcesUpdated.publish({ + await bus.publish(Resource.Bus, Stage.Events.ResourcesUpdated, { stageID: input.stageID, }); - } + }, ) .expose("aws_account_scan", AWS.Account.scan) - .mutation( - "app_stage_sync", - z.object({ stageID: z.string() }), - async (input) => await App.Stage.Events.Updated.publish(input) + .mutation("app_stage_sync", z.object({ stageID: z.string() }), (input) => + bus.publish(Resource.Bus, App.Stage.Events.Updated, input), ) .mutation("workspace_create", Workspace.create.schema, async (input) => { const actor = assertActor("account"); @@ -102,7 +102,7 @@ export const server = new Server() User.create({ email: actor.properties.email, first: true, - }) + }), ); }) .expose("user_create", User.create) diff --git a/packages/functions/src/rest/account.ts b/packages/functions/src/rest/account.ts deleted file mode 100644 index b5bdb26c..00000000 --- a/packages/functions/src/rest/account.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { Account } from "@console/core/account"; -import { assertActor } from "@console/core/actor"; -import { User } from "@console/core/user"; -import { withApiAuth } from "src/api"; -import { ApiHandler } from "sst/node/api"; - -export const handler = ApiHandler( - withApiAuth(async () => { - const 
actor = assertActor("account"); - return { - statusCode: 200, - headers: { - "content-type": "application/json", - }, - body: JSON.stringify({ - id: actor.properties.accountID, - email: actor.properties.email, - workspaces: await Account.workspaces(), - }), - }; - }), -); diff --git a/packages/functions/src/rest/lambda/invoke.ts b/packages/functions/src/rest/lambda/invoke.ts deleted file mode 100644 index 5cb1306c..00000000 --- a/packages/functions/src/rest/lambda/invoke.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Lambda } from "@console/core/lambda"; -import { withApiAuth } from "src/api"; -import { ApiHandler, useJsonBody } from "sst/node/api"; - -export const handler = ApiHandler( - withApiAuth(async () => { - const body = useJsonBody(); - const requestID = await Lambda.invoke(body); - - return { - statusCode: 200, - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - requestID, - }), - }; - }) -); diff --git a/packages/functions/src/rest/local.ts b/packages/functions/src/rest/local.ts deleted file mode 100644 index 2150ea09..00000000 --- a/packages/functions/src/rest/local.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { assertActor } from "@console/core/actor"; -import { app, stage } from "@console/core/app/app.sql"; -import { and, db, eq } from "@console/core/drizzle"; -import { user } from "@console/core/user/user.sql"; -import { workspace } from "@console/core/workspace/workspace.sql"; -import { withApiAuth } from "src/api"; -import { ApiHandler, useJsonBody, useQueryParams } from "sst/node/api"; -import { z } from "zod"; - -const Body = z.object({ - app: z.string(), - stage: z.string(), -}); -export const handler = ApiHandler( - withApiAuth(async () => { - const body = Body.parse(useQueryParams()); - const actor = assertActor("account"); - - const result = await db - .select({ - workspace: workspace.slug, - }) - .from(user) - .innerJoin(workspace, eq(workspace.id, user.workspaceID)) - .innerJoin(stage, eq(stage.workspaceID, 
workspace.id)) - .innerJoin( - app, - and(eq(app.id, stage.appID), eq(workspace.id, app.workspaceID)) - ) - .where( - and( - eq(user.email, actor.properties.email), - eq(app.name, body.app), - eq(stage.name, body.stage) - ) - ); - - console.log(result); - - return { - statusCode: 200, - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(result.map((item) => item.workspace)), - }; - }) -); diff --git a/packages/functions/src/rest/log/scan.ts b/packages/functions/src/rest/log/scan.ts deleted file mode 100644 index 617cfe5d..00000000 --- a/packages/functions/src/rest/log/scan.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Stage } from "@console/core/app/stage"; -import { Log } from "@console/core/log"; -import { withApiAuth } from "src/api"; -import { ApiHandler, useJsonBody, useQueryParams } from "sst/node/api"; -import { z } from "zod"; - -const Body = z.object({ - stageID: z.string(), - requestID: z.string().optional(), - timestamp: z.number({ coerce: true }).optional(), - logGroup: z.string(), - logStream: z.string(), -}); - -export const handler = ApiHandler( - withApiAuth(async () => { - const body = Body.parse(useQueryParams()); - let start = Date.now() - 2 * 60 * 1000; - console.log("tailing from", start); - const config = await Stage.assumeRole(body.stageID); - if (!config) - return { - statusCode: 500, - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ error: "Failed to assume role" }), - }; - - const logs = await Log.scan({ - ...body, - timestamp: body.timestamp || undefined, - config, - }); - - return { - statusCode: 200, - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(logs), - }; - }), -); diff --git a/packages/functions/src/rest/log/tail.ts b/packages/functions/src/rest/log/tail.ts deleted file mode 100644 index bbed0ec0..00000000 --- a/packages/functions/src/rest/log/tail.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { - CloudWatchLogsClient, - DescribeLogStreamsCommand, - 
FilterLogEventsCommand, -} from "@aws-sdk/client-cloudwatch-logs"; -import { Stage } from "@console/core/app/stage"; -import { Log } from "@console/core/log"; -import { withApiAuth } from "src/api"; -import { ApiHandler, useJsonBody } from "sst/node/api"; -import { Storage } from "@console/core/storage"; -import { z } from "zod"; -import { Realtime } from "@console/core/realtime"; - -const Body = z.object({ - stageID: z.string(), - profileID: z.string(), - logGroup: z.string(), -}); - -export const handler = ApiHandler( - withApiAuth(async () => { - const body = Body.parse(useJsonBody()); - let start = Date.now() - 2 * 60 * 1000; - console.log("tailing from", start); - const config = await Stage.assumeRole(body.stageID); - if (!config) - return { - done: true, - }; - const client = new CloudWatchLogsClient(config); - const sourcemapKey = - `arn:aws:lambda:${config.region}:${config.awsAccountID}:function:` + - body.logGroup.split("/").slice(3, 5).join("/"); - - async function* fetchStreams(logGroup: string) { - let nextToken: string | undefined; - console.log("fetching streams for", logGroup); - - while (true) { - try { - const response = await client.send( - new DescribeLogStreamsCommand({ - logGroupIdentifier: logGroup, - nextToken: nextToken, - orderBy: "LastEventTime", - descending: true, - }) - ); - - for (const logStream of response.logStreams || []) { - yield logStream; - } - - nextToken = response.nextToken; - if (!nextToken) { - break; - } - } catch (e) { - break; - } - } - } - - async function* fetchEvents( - logGroup: string, - startTime: number, - streams: string[] - ) { - let nextToken: string | undefined; - console.log("fetching logs for", streams.length, "streams"); - - while (true) { - const response = await client.send( - new FilterLogEventsCommand({ - logGroupIdentifier: logGroup, - logStreamNames: streams, - nextToken, - startTime, - }) - ); - - for (const event of response.events || []) { - yield event; - } - - nextToken = response.nextToken; - 
if (!nextToken) { - break; - } - } - } - - const streams: string[] = []; - - for await (const stream of fetchStreams(body.logGroup)) { - streams.push(stream.logStreamName || ""); - if (streams.length === 100) break; - } - if (!streams.length) - return { - statusCode: 200, - }; - if (!start) start = Date.now() - 2 * 60 * 1000; - - console.log("fetching since", new Date(start).toLocaleString()); - const processor = Log.createProcessor({ - sourcemapKey, - group: body.logGroup + "-tail", - config, - }); - - for await (const event of fetchEvents(body.logGroup, start, streams)) { - await processor.process({ - timestamp: event.timestamp!, - line: event.message!, - streamName: event.logStreamName!, - id: event.eventId!, - }); - } - - const data = processor.flush(); - if (data.length) { - console.log("sending", data.length, "invocations"); - const url = await Storage.putEphemeral(JSON.stringify(data), { - ContentType: "application/json", - }); - await Realtime.publish("invocation.url", url, body.profileID); - } - - return { - statusCode: 200, - }; - }) -); diff --git a/packages/functions/src/rest/workspace.ts b/packages/functions/src/rest/workspace.ts deleted file mode 100644 index bc825052..00000000 --- a/packages/functions/src/rest/workspace.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { assertActor, withActor } from "@console/core/actor"; -import { withApiAuth } from "../api"; -import { ApiHandler, useJsonBody } from "sst/node/api"; -import { Workspace } from "@console/core/workspace"; -import { User } from "@console/core/user"; - -export const create = ApiHandler( - withApiAuth(async () => { - const actor = assertActor("account"); - const body = useJsonBody(); - const parsed = Workspace.create.schema.parse(body); - try { - const workspaceID = await Workspace.create(parsed); - const workspace = await Workspace.fromID(workspaceID); - await withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - () => - User.create({ - email: actor.properties.email, - 
first: true, - }), - ); - return { - statusCode: 200, - headers: { - "content-type": "application/json", - }, - body: JSON.stringify(workspace), - }; - } catch { - return { - statusCode: 400, - headers: { - "content-type": "application/json", - }, - }; - } - }), -); - -export const remove = ApiHandler( - withApiAuth(async () => { - const body = useJsonBody(); - const parsed = Workspace.remove.schema.parse(body); - try { - await Workspace.remove(parsed); - return { - statusCode: 200, - headers: { - "content-type": "application/json", - }, - }; - } catch { - return { - statusCode: 400, - headers: { - "content-type": "application/json", - }, - }; - } - }), -); diff --git a/packages/functions/src/run/runner-warmer.ts b/packages/functions/src/run/runner-warmer.ts deleted file mode 100644 index 07e5d82b..00000000 --- a/packages/functions/src/run/runner-warmer.ts +++ /dev/null @@ -1,55 +0,0 @@ -import type { Context } from "aws-lambda"; -import { withActor } from "@console/core/actor"; -import { AWS } from "@console/core/aws"; -import { Run } from "@console/core/run"; -import { AppRepo } from "@console/core/app/repo"; -import { Github } from "@console/core/git/github"; - -export async function handler(evt: Run.RunnerWarmerEvent, context: Context) { - const { workspaceID, runnerID } = evt; - await withActor( - { - type: "system", - properties: { - workspaceID, - }, - }, - async () => { - const runner = await Run.getRunnerByID(runnerID); - if (!runner) return; - if (!runner.resource) return; - - // If runner is not active, stop warming. - // Note: Except for the case where the runner is created but has not been used. - // In this case, we want to warm it so that it can be used immediately. 
- const usage = await Run.getRunnerActiveUsage(runnerID); - if (!usage.length && runner.timeRun) { - await Run.unsetRunnerWarmer(runnerID); - return; - } - - // Build cloneUrl - const gitRepo = await Github.getExternalInfoByRepoID(runner.appRepoID); - if (!gitRepo) return; - const cloneUrl = await Github.getCloneUrl(gitRepo); - - // Warm - const awsAccount = await AWS.Account.fromID(runner.awsAccountID); - if (!awsAccount) return; - const credentials = await AWS.assumeRole(awsAccount?.accountID!); - if (!credentials) return; - await Run.warmRunner({ - region: runner.region, - engine: runner.engine, - resource: runner.resource, - credentials, - cloneUrl, - instances: Math.max(usage.length, 1), - }); - - // Schedule next warmer - process.env.RUNNER_WARMER_FUNCTION_ARN = context.invokedFunctionArn; - await Run.scheduleRunnerWarmer(runnerID); - } - ); -} diff --git a/packages/functions/src/scratch.ts b/packages/functions/src/scratch.ts deleted file mode 100644 index 5ddb0dbb..00000000 --- a/packages/functions/src/scratch.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { withActor } from "@console/core/actor"; -import { AWS } from "@console/core/aws"; -import { awsAccount } from "@console/core/aws/aws.sql"; -import { db } from "@console/core/drizzle"; - -export async function handler() { - const rows = await db.select().from(awsAccount).execute(); - for (const row of rows) { - await withActor( - { - type: "system", - properties: { - workspaceID: row.workspaceID, - }, - }, - () => - AWS.Account.Events.Created.publish({ - awsAccountID: row.id, - }) - ); - } -} diff --git a/packages/functions/src/sessions.ts b/packages/functions/src/sessions.ts index 7d547f88..0ceebf36 100644 --- a/packages/functions/src/sessions.ts +++ b/packages/functions/src/sessions.ts @@ -1,6 +1,6 @@ -import { createSessionBuilder } from "sst/node/future/auth"; +import { auth } from "sst/auth"; -export const sessions = createSessionBuilder<{ +export const sessions = auth.sessions<{ account: { accountID: 
string; email: string; diff --git a/packages/functions/sst-env.d.ts b/packages/functions/sst-env.d.ts index a9187e85..b0900f6b 100644 --- a/packages/functions/sst-env.d.ts +++ b/packages/functions/sst-env.d.ts @@ -1 +1,144 @@ -/// +/* This file is auto-generated by SST. Do not edit. */ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +import "sst" +export {} +declare module "sst" { + export interface Resource { + "Alerts": { + "arn": string + "type": "sst.aws.SnsTopic" + } + "Api": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "ApiRouter": { + "type": "sst.aws.Router" + "url": string + } + "Auth": { + "publicKey": string + "type": "sst.aws.Auth" + } + "AuthAuthenticator": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "AuthRouter": { + "type": "sst.aws.Router" + "url": string + } + "AutodeployConfig": { + "buildImage": string + "buildspecBucketName": string + "buildspecVersion": string + "configParserFunctionArn": string + "runnerRemoverFunctionArn": string + "runnerRemoverScheduleGroupName": string + "runnerRemoverScheduleRoleArn": string + "timeoutMonitorFunctionArn": string + "timeoutMonitorScheduleGroupName": string + "timeoutMonitorScheduleRoleArn": string + "type": "sst.sst.Linkable" + } + "AutodeployConfigParser": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployRunnerRemover": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployTimeoutMonitor": { + "name": string + "type": "sst.aws.Function" + } + "BotpoisonSecretKey": { + "type": "sst.sst.Secret" + "value": string + } + "Bus": { + "arn": string + "name": string + "type": "sst.aws.Bus" + } + "Connect": { + "name": string + "type": "sst.aws.Function" + } + "Database": { + "database": string + "host": string + "password": string + "port": number + "type": "sst.sst.Linkable" + "username": string + } + "Email": { + "configSet": string + "sender": string + "type": "sst.aws.Email" + } + "Error": { + "name": string + 
"type": "sst.aws.Function" + "url": string + } + "GithubAppID": { + "type": "sst.sst.Secret" + "value": string + } + "GithubPrivateKey": { + "type": "sst.sst.Secret" + "value": string + } + "GithubWebhookSecret": { + "type": "sst.sst.Secret" + "value": string + } + "IssueDestination": { + "prefix": string + "role": string + "stream": string + "type": "sst.sst.Linkable" + } + "IssueStream": { + "name": string + "type": "sst.aws.KinesisStream" + } + "SlackClientID": { + "type": "sst.sst.Secret" + "value": string + } + "SlackClientSecret": { + "type": "sst.sst.Secret" + "value": string + } + "Storage": { + "name": string + "type": "sst.aws.Bucket" + } + "Websocket": { + "http": string + "realtime": string + "token": string + "type": "sst.sst.Linkable" + } + "WebsocketAuthorizer": { + "name": string + "type": "sst.aws.Function" + } + "WebsocketToken": { + "type": "sst.sst.Secret" + "value": string + } + "Workspace": { + "type": "sst.aws.StaticSite" + "url": string + } + } +} diff --git a/packages/functions/tsconfig.json b/packages/functions/tsconfig.json index 5408b70d..d2b70ab2 100644 --- a/packages/functions/tsconfig.json +++ b/packages/functions/tsconfig.json @@ -1,10 +1,10 @@ { - "extends": "@tsconfig/node16/tsconfig.json", + "extends": "@tsconfig/node20/tsconfig.json", "compilerOptions": { "jsx": "react", "noUncheckedIndexedAccess": true, "module": "esnext", - "moduleResolution": "node", + "moduleResolution": "bundler", "baseUrl": ".", "paths": { "@console/core/*": ["../core/src/*"] diff --git a/packages/mail/sst-env.d.ts b/packages/mail/sst-env.d.ts new file mode 100644 index 00000000..b0900f6b --- /dev/null +++ b/packages/mail/sst-env.d.ts @@ -0,0 +1,144 @@ +/* This file is auto-generated by SST. Do not edit. 
*/ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +import "sst" +export {} +declare module "sst" { + export interface Resource { + "Alerts": { + "arn": string + "type": "sst.aws.SnsTopic" + } + "Api": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "ApiRouter": { + "type": "sst.aws.Router" + "url": string + } + "Auth": { + "publicKey": string + "type": "sst.aws.Auth" + } + "AuthAuthenticator": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "AuthRouter": { + "type": "sst.aws.Router" + "url": string + } + "AutodeployConfig": { + "buildImage": string + "buildspecBucketName": string + "buildspecVersion": string + "configParserFunctionArn": string + "runnerRemoverFunctionArn": string + "runnerRemoverScheduleGroupName": string + "runnerRemoverScheduleRoleArn": string + "timeoutMonitorFunctionArn": string + "timeoutMonitorScheduleGroupName": string + "timeoutMonitorScheduleRoleArn": string + "type": "sst.sst.Linkable" + } + "AutodeployConfigParser": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployRunnerRemover": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployTimeoutMonitor": { + "name": string + "type": "sst.aws.Function" + } + "BotpoisonSecretKey": { + "type": "sst.sst.Secret" + "value": string + } + "Bus": { + "arn": string + "name": string + "type": "sst.aws.Bus" + } + "Connect": { + "name": string + "type": "sst.aws.Function" + } + "Database": { + "database": string + "host": string + "password": string + "port": number + "type": "sst.sst.Linkable" + "username": string + } + "Email": { + "configSet": string + "sender": string + "type": "sst.aws.Email" + } + "Error": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "GithubAppID": { + "type": "sst.sst.Secret" + "value": string + } + "GithubPrivateKey": { + "type": "sst.sst.Secret" + "value": string + } + "GithubWebhookSecret": { + "type": "sst.sst.Secret" + "value": string + } + 
"IssueDestination": { + "prefix": string + "role": string + "stream": string + "type": "sst.sst.Linkable" + } + "IssueStream": { + "name": string + "type": "sst.aws.KinesisStream" + } + "SlackClientID": { + "type": "sst.sst.Secret" + "value": string + } + "SlackClientSecret": { + "type": "sst.sst.Secret" + "value": string + } + "Storage": { + "name": string + "type": "sst.aws.Bucket" + } + "Websocket": { + "http": string + "realtime": string + "token": string + "type": "sst.sst.Linkable" + } + "WebsocketAuthorizer": { + "name": string + "type": "sst.aws.Function" + } + "WebsocketToken": { + "type": "sst.sst.Secret" + "value": string + } + "Workspace": { + "type": "sst.aws.StaticSite" + "url": string + } + } +} diff --git a/packages/scripts/package.json b/packages/scripts/package.json index 0b5e00b7..58ba384f 100644 --- a/packages/scripts/package.json +++ b/packages/scripts/package.json @@ -12,7 +12,7 @@ "@jsx-email/render": "^1.1.0", "@tsconfig/node16": "^1.0.3", "luxon": "^3.3.0", - "sst": "2.43.4", + "sst": "3.3.2", "tsx": "^3.12.7" }, "devDependencies": { diff --git a/packages/scripts/sst-env.d.ts b/packages/scripts/sst-env.d.ts new file mode 100644 index 00000000..b0900f6b --- /dev/null +++ b/packages/scripts/sst-env.d.ts @@ -0,0 +1,144 @@ +/* This file is auto-generated by SST. Do not edit. 
*/ +/* tslint:disable */ +/* eslint-disable */ +/* deno-fmt-ignore-file */ +import "sst" +export {} +declare module "sst" { + export interface Resource { + "Alerts": { + "arn": string + "type": "sst.aws.SnsTopic" + } + "Api": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "ApiRouter": { + "type": "sst.aws.Router" + "url": string + } + "Auth": { + "publicKey": string + "type": "sst.aws.Auth" + } + "AuthAuthenticator": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "AuthRouter": { + "type": "sst.aws.Router" + "url": string + } + "AutodeployConfig": { + "buildImage": string + "buildspecBucketName": string + "buildspecVersion": string + "configParserFunctionArn": string + "runnerRemoverFunctionArn": string + "runnerRemoverScheduleGroupName": string + "runnerRemoverScheduleRoleArn": string + "timeoutMonitorFunctionArn": string + "timeoutMonitorScheduleGroupName": string + "timeoutMonitorScheduleRoleArn": string + "type": "sst.sst.Linkable" + } + "AutodeployConfigParser": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployRunnerRemover": { + "name": string + "type": "sst.aws.Function" + } + "AutodeployTimeoutMonitor": { + "name": string + "type": "sst.aws.Function" + } + "BotpoisonSecretKey": { + "type": "sst.sst.Secret" + "value": string + } + "Bus": { + "arn": string + "name": string + "type": "sst.aws.Bus" + } + "Connect": { + "name": string + "type": "sst.aws.Function" + } + "Database": { + "database": string + "host": string + "password": string + "port": number + "type": "sst.sst.Linkable" + "username": string + } + "Email": { + "configSet": string + "sender": string + "type": "sst.aws.Email" + } + "Error": { + "name": string + "type": "sst.aws.Function" + "url": string + } + "GithubAppID": { + "type": "sst.sst.Secret" + "value": string + } + "GithubPrivateKey": { + "type": "sst.sst.Secret" + "value": string + } + "GithubWebhookSecret": { + "type": "sst.sst.Secret" + "value": string + } + 
"IssueDestination": { + "prefix": string + "role": string + "stream": string + "type": "sst.sst.Linkable" + } + "IssueStream": { + "name": string + "type": "sst.aws.KinesisStream" + } + "SlackClientID": { + "type": "sst.sst.Secret" + "value": string + } + "SlackClientSecret": { + "type": "sst.sst.Secret" + "value": string + } + "Storage": { + "name": string + "type": "sst.aws.Bucket" + } + "Websocket": { + "http": string + "realtime": string + "token": string + "type": "sst.sst.Linkable" + } + "WebsocketAuthorizer": { + "name": string + "type": "sst.aws.Function" + } + "WebsocketToken": { + "type": "sst.sst.Secret" + "value": string + } + "Workspace": { + "type": "sst.aws.StaticSite" + "url": string + } + } +} diff --git a/packages/scripts/tsconfig.json b/packages/scripts/tsconfig.json index 9d5e702d..1190f272 100644 --- a/packages/scripts/tsconfig.json +++ b/packages/scripts/tsconfig.json @@ -4,6 +4,7 @@ "jsx": "react", "noUncheckedIndexedAccess": true, "module": "esnext", + "moduleResolution": "bundler", "baseUrl": ".", "paths": { "@console/core/*": ["../core/src/*"] diff --git a/packages/web/workspace/package.json b/packages/web/workspace/package.json index f2e5d39a..07f29f9c 100644 --- a/packages/web/workspace/package.json +++ b/packages/web/workspace/package.json @@ -4,45 +4,48 @@ "description": "", "scripts": { "start": "vite", - "dev": "sst bind vite", + "dev": "vite", "build": "vite build", - "serve": "vite preview", - "typecheck": "tsc --noEmit --incremental" + "serve": "vite preview" }, + "type": "module", "license": "MIT", "devDependencies": { - "@macaron-css/vite": "^1.3.0", - "sst": "2.43.4", - "typescript": "^5.2.2", - "vite": "3", - "vite-plugin-inspect": "^0.7.38", - "vite-plugin-solid": "^2.7.2" + "@macaron-css/vite": "1.5.1", + "sst": "3.3.2", + "typescript": "5.5.4", + "vite": "5.4.10", + "vite-plugin-inspect": "0.7.42", + "vite-plugin-solid": "2.10.2" }, "dependencies": { - "@botpoison/browser": "^0.1.30", - "@fontsource/ibm-plex-mono": 
"^4.5.13", - "@fontsource/rubik": "^4.5.14", - "@kobalte/core": "^0.11.2", - "@macaron-css/core": "1.2.0", - "@macaron-css/solid": "1.0.1", - "@modular-forms/solid": "^0.20.0", - "@paralleldrive/cuid2": "^2.2.0", - "@solid-primitives/event-bus": "^1.0.7", - "@solid-primitives/event-listener": "^2.2.13", - "@solid-primitives/input-mask": "^0.2.0", - "@solid-primitives/keyboard": "^1.2.3", - "@solid-primitives/mutation-observer": "^1.1.13", - "@solid-primitives/storage": "^2.0.0", - "@solidjs/router": "^0.8.2", + "@botpoison/browser": "0.1.30", + "@fontsource/ibm-plex-mono": "4.5.13", + "@fontsource/rubik": "4.5.14", + "@kobalte/core": "0.13.0", + "@macaron-css/core": "1.5.1", + "@macaron-css/solid": "1.5.3", + "@modular-forms/solid": "0.20.0", + "@paralleldrive/cuid2": "2.2.2", + "@solid-primitives/event-bus": "1.0.11", + "@solid-primitives/event-listener": "2.3.3", + "@solid-primitives/input-mask": "0.2.2", + "@solid-primitives/keyboard": "1.2.8", + "@solid-primitives/mutation-observer": "1.1.17", + "@solid-primitives/storage": "2.1.4", + "@solidjs/router": "0.8.4", "@tanstack/solid-virtual": "3.0.0-beta.6", - "@types/luxon": "^3.3.1", - "aws-iot-device-sdk-v2": "^1.13.1", - "luxon": "^3.4.3", - "modern-normalize": "^1.1.0", - "posthog-js": "^1.121.2", - "remeda": "^2.0.10", - "replicache": "^14.2.2", - "solid-js": "^1.8.3", - "valibot": "^0.19.0" + "@types/luxon": "3.4.2", + "aws-iot-device-sdk-v2": "1.20.1", + "hono": "4.6.5", + "luxon": "3.5.0", + "modern-normalize": "1.1.0", + "posthog-js": "1.154.5", + "remeda": "2.10.1", + "replicache": "14.2.2", + "solid-js": "1.9.3", + "solid-list": "^0.3.0", + "valibot": "0.19.0", + "virtua": "^0.35.1" } } diff --git a/packages/web/workspace/src/App.tsx b/packages/web/workspace/src/App.tsx index 141cb23e..16eedec3 100644 --- a/packages/web/workspace/src/App.tsx +++ b/packages/web/workspace/src/App.tsx @@ -5,34 +5,25 @@ import "@fontsource/ibm-plex-mono/latin.css"; import { styled } from "@macaron-css/solid"; import { 
darkClass, lightClass, theme } from "./ui/theme"; import { globalStyle, macaron$ } from "@macaron-css/core"; -import { dropAllDatabases } from "replicache"; import { Match, Switch, onCleanup, Component, createSignal } from "solid-js"; import { Navigate, Route, Router, Routes, useNavigate } from "@solidjs/router"; import { Auth, Code } from "./pages/auth"; -import { AuthProvider } from "./providers/auth"; -import { RealtimeProvider } from "./providers/realtime"; import { CommandBar, useCommandBar } from "./pages/workspace/command-bar"; import { Debug } from "./pages/debug"; import { Design } from "./pages/design"; import { Workspace } from "./pages/workspace"; import { WorkspaceCreate } from "./pages/workspace-create"; -import { - IconLogout, - IconAddCircle, - IconWorkspace, - IconApp, -} from "./ui/icons/custom"; +import { IconAddCircle, IconWorkspace } from "./ui/icons/custom"; import { LocalProvider } from "./providers/local"; import { useStorage } from "./providers/account"; import { DummyConfigProvider, DummyProvider } from "./providers/dummy"; -import { InvocationProvider } from "./providers/invocation"; +import { LocalLogsProvider } from "./providers/invocation"; import { FlagsProvider } from "./providers/flags"; import { NotFound } from "./pages/not-found"; import { Local } from "./pages/local"; import { ReplicacheStatusProvider } from "./providers/replicache-status"; import { AuthProvider2, useAuth2 } from "./providers/auth2"; -import { createSubscription } from "./providers/replicache"; -import { AppStore } from "./data/app"; +import { RealtimeProvider } from "./providers/realtime"; const Root = styled("div", { base: { @@ -178,7 +169,7 @@ export const App: Component = () => { - + @@ -210,14 +201,15 @@ export const App: Component = () => { } > w.id === storage.value.workspace, ) || auth.current.workspaces[0] ).slug - }`} + }`} /> @@ -229,7 +221,7 @@ export const App: Component = () => { /> } /> - + diff --git 
a/packages/web/workspace/src/common/autodeploy-detail.tsx b/packages/web/workspace/src/common/autodeploy-detail.tsx index 4a7acbcb..8ccaa610 100644 --- a/packages/web/workspace/src/common/autodeploy-detail.tsx +++ b/packages/web/workspace/src/common/autodeploy-detail.tsx @@ -42,11 +42,14 @@ import { githubCommit, githubTag, } from "$/common/url-builder"; -import { Row, Text, Stack, theme, utility } from "$/ui"; import { pipe, dropWhile, drop, takeWhile, filter } from "remeda"; import { useWorkspace } from "../pages/workspace/context"; import { useAuth2 } from "$/providers/auth2"; import { IconTag, IconXCircle } from "$/ui/icons"; +import { utility } from "$/ui/utility"; +import { theme } from "$/ui/theme"; +import { Stack, Row } from "$/ui/layout"; +import { Text } from "$/ui/text"; const DATETIME_NO_TIME = { month: "short", @@ -275,7 +278,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { const replicacheStatus = useReplicacheStatus(); const data = createSubscription(async (tx) => { const runs = (await RunStore.all(tx)).filter( - (run) => run.id === params.runID + (run) => run.id === params.runID, ); if (!runs.length) return; @@ -285,7 +288,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { const stage = await StageStore.get(tx, run.stageID); const update = (await StateUpdateStore.forStage(tx, run.stageID)).find( - (update) => update.runID === run.id + (update) => update.runID === run.id, ); return { run, stage, update }; }); @@ -337,21 +340,21 @@ export function AutodeployDetail(props: AutodeployDetailProps) { const repoURL = createMemo(() => trigger.source === "github" ? githubRepo(trigger.repo.owner, trigger.repo.repo) - : "" + : "", ); const runInfo = createMemo(() => { const branch = trigger.type === "pull_request" ? `pr#${trigger.number}` : trigger.type === "tag" - ? trigger.tag - : trigger.branch; + ? trigger.tag + : trigger.branch; const uri = trigger.type === "pull_request" ? 
githubPr(repoURL(), trigger.number) : trigger.type === "tag" - ? githubTag(repoURL(), trigger.tag) - : githubBranch(repoURL(), trigger.branch); + ? githubTag(repoURL(), trigger.tag) + : githubBranch(repoURL(), trigger.branch); return { trigger, branch, uri }; }); @@ -426,7 +429,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { title={ data.value!.run.time.started ? DateTime.fromISO( - data.value!.run.time.started! + data.value!.run.time.started!, ).toLocaleString(DateTime.DATETIME_FULL) : undefined } @@ -434,7 +437,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { {data.value!.run.time.started ? formatSinceTime( DateTime.fromISO(data.value!.run.time.started!).toSQL()!, - true + true, ) : "—"} @@ -454,7 +457,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { DateTime.fromISO(data.value!.run.time.completed!) .diff(DateTime.fromISO(data.value!.run.time.started!)) .as("milliseconds"), - true + true, ) : "—"} @@ -473,7 +476,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { if (!log) return []; const results = await fetch( import.meta.env.VITE_API_URL + - "/rest/log/scan?" + + "/log/aws/scan?" + new URLSearchParams( log.engine === "lambda" ? { @@ -487,14 +490,14 @@ export function AutodeployDetail(props: AutodeployDetailProps) { stageID: data.value!.stage!.id, logStream: log.logStream, logGroup: log.logGroup, - } + }, ).toString(), { headers: { "x-sst-workspace": workspace().id, Authorization: "Bearer " + auth.current.token, }, - } + }, ).then( (res) => res.json() as Promise< @@ -502,14 +505,14 @@ export function AutodeployDetail(props: AutodeployDetailProps) { message: string; timestamp: number; }[] - > + >, ); console.log("!! LENGTH AAA", results.length); return results; }, { initialValue: [], - } + }, ); createEffect(() => { console.log("!! 
EFFECT", logs().length); @@ -521,7 +524,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { dropWhile((r) => !r.message.startsWith("[sst.deploy.start]")), drop(1), filter((r) => r.message.trim() != ""), - takeWhile((r) => !r.message.startsWith("[sst.deploy.end]")) + takeWhile((r) => !r.message.startsWith("[sst.deploy.end]")), ); }); @@ -544,7 +547,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { > Logs —{" "} {DateTime.fromMillis(trimmedLogs()![0].timestamp!).toLocaleString( - DATETIME_NO_TIME + DATETIME_NO_TIME, )} @@ -558,7 +561,7 @@ export function AutodeployDetail(props: AutodeployDetailProps) { .toLocaleString(DateTime.DATETIME_FULL_WITH_SECONDS)} > {DateTime.fromMillis(entry.timestamp).toFormat( - "HH:mm:ss.SSS" + "HH:mm:ss.SSS", )} {entry.message} diff --git a/packages/web/workspace/src/common/invocation.tsx b/packages/web/workspace/src/common/invocation.tsx index 8a16be7a..56bf9af1 100644 --- a/packages/web/workspace/src/common/invocation.tsx +++ b/packages/web/workspace/src/common/invocation.tsx @@ -1,8 +1,6 @@ import { ErrorList, ErrorItem } from "$/pages/workspace/stage/logs/error"; -import { Row, Stack, TabTitle, Tag, TextButton, theme, utility } from "$/ui"; import { IconBookmark, IconArrowPath } from "$/ui/icons"; import { IconCaretRight } from "$/ui/icons/custom"; -import { inputFocusStyles } from "$/ui/form"; import { For, Match, @@ -12,16 +10,16 @@ import { createSignal, mergeProps, } from "solid-js"; -import { unwrap } from "solid-js/store"; import { formatDuration, formatBytes } from "./format"; import { styled } from "@macaron-css/solid"; -import { useReplicache } from "$/providers/replicache"; -import { Resource } from "@console/core/app/resource"; -import { Invocation } from "@console/core/log"; import { Link } from "@solidjs/router"; import { DateTime } from "luxon"; -import { useKeyboardNavigator } from "./keyboard-navigator"; -import { useStageContext } from "$/pages/workspace/stage/context"; +import { 
TabTitle, TextButton } from "$/ui/button"; +import { Row, Stack } from "$/ui/layout"; +import { Tag } from "$/ui/tag"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Invocation } from "@console/core/log"; const shortDateOptions: Intl.DateTimeFormatOptions = { month: "short", @@ -40,18 +38,7 @@ const longDateOptions: Intl.DateTimeFormatOptions = { const Root = styled("div", { base: { - borderStyle: "solid", - borderWidth: "0 1px 1px 1px", - borderColor: theme.color.divider.base, - ":last-child": { - borderRadius: `0 0 ${theme.borderRadius} ${theme.borderRadius}`, - }, - selectors: { - "&[data-focus]": { - ...inputFocusStyles, - outlineOffset: -1, - }, - }, + width: "100%", }, variants: { expanded: { @@ -156,6 +143,8 @@ export const Log = styled("div", { export const LogTime = styled("div", { base: { + userSelect: "none", + WebkitUserSelect: "none", flexShrink: 0, minWidth: 89, textAlign: "left", @@ -272,17 +261,15 @@ const FunctionLink = styled(Link, { export function InvocationRow(props: { invocation: Invocation; onSavePayload?: () => void; - function: { - arn: string; - handler: string; - id: string; - }; local: boolean; - mixed?: boolean; - focus?: boolean; + mixed?: { + description: string; + link: string; + }; + expanded?: boolean; onClick?: () => void; + onReplay?: () => void; }) { - const [expanded, setExpanded] = createSignal(false); const [tab, setTab] = createSignal< "logs" | "request" | "response" | "report" >("logs"); @@ -294,11 +281,10 @@ export function InvocationRow(props: { ); const longDate = createMemo(() => new Intl.DateTimeFormat("en-US", longDateOptions).format( - props.invocation.start, + props.invocation.start ), ); const [replaying, setReplaying] = createSignal(false); - const rep = useReplicache(); const level = createMemo(() => props.invocation.errors.length ? 
props.invocation.errors.some((error) => error.failed) @@ -307,23 +293,16 @@ export function InvocationRow(props: { : "info", ); - const navigator = useKeyboardNavigator(); - const ctx = useStageContext(); - return ( - { - navigator?.focus(e.currentTarget); - setExpanded((r) => !r); - }} - > + @@ -344,13 +323,13 @@ export function InvocationRow(props: { {props.mixed - ? props.function.handler + ? props.mixed.description : props.invocation.errors[0]?.message || props.invocation.logs[0]?.message} - - + + e.stopImmediatePropagation()}> - + { e.stopPropagation(); @@ -414,8 +393,8 @@ export function InvocationRow(props: { Save - - + + View function @@ -426,11 +405,7 @@ export function InvocationRow(props: { onClick={(e) => { e.stopPropagation(); setReplaying(true); - rep().mutate.function_invoke({ - stageID: ctx.stage.id, - functionARN: props.function.arn, - payload: structuredClone(unwrap(props.invocation.input)), - }); + props.onReplay?.(); setTimeout(() => setReplaying(false), 2000); }} > diff --git a/packages/web/workspace/src/data/log.ts b/packages/web/workspace/src/data/log.ts index 96c81b77..c0480dc2 100644 --- a/packages/web/workspace/src/data/log.ts +++ b/packages/web/workspace/src/data/log.ts @@ -231,7 +231,7 @@ bus.on("function.error", (e) => { id: invocation.id, type: e.errorType, message: e.errorMessage, - stack: e.trace.map((t) => ({ + stack: e.trace.map((t: any) => ({ raw: t, })), }); diff --git a/packages/web/workspace/src/pages/auth/index.tsx b/packages/web/workspace/src/pages/auth/index.tsx index c3a931bf..c2db490f 100644 --- a/packages/web/workspace/src/pages/auth/index.tsx +++ b/packages/web/workspace/src/pages/auth/index.tsx @@ -1,14 +1,3 @@ -import { - Row, - Text, - Stack, - Input, - Button, - FormField, - Fullscreen, - theme, - utility, -} from "$/ui"; import { DateTime } from "luxon"; import { IconApp } from "$/ui/icons/custom"; import { styled } from "@macaron-css/solid"; @@ -16,8 +5,13 @@ import { IconChevronRight } from "$/ui/icons"; import { 
Navigate, Route, Routes, useSearchParams } from "@solidjs/router"; import { For, Show, createSignal, onMount } from "solid-js"; import Botpoison from "@botpoison/browser"; -import { createSingleSelectListState } from "@kobalte/core"; import { NotFound } from "../not-found"; +import { FormField, Input } from "$/ui/form"; +import { Fullscreen, Stack, Row } from "$/ui/layout"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const Root = styled("div", { base: { @@ -160,7 +154,7 @@ export function Email() {
{ setSubmitting(true); e.preventDefault(); @@ -214,7 +208,7 @@ export function Code() { .join(""); location.href = import.meta.env.VITE_AUTH_URL + - "/callback?" + + "/email/callback?" + new URLSearchParams({ code, }).toString(); diff --git a/packages/web/workspace/src/pages/not-found.tsx b/packages/web/workspace/src/pages/not-found.tsx index 995d2407..38888aee 100644 --- a/packages/web/workspace/src/pages/not-found.tsx +++ b/packages/web/workspace/src/pages/not-found.tsx @@ -1,10 +1,10 @@ import { Show } from "solid-js"; import { styled } from "@macaron-css/solid"; import { Link } from "@solidjs/router"; -import { theme } from "$/ui"; import { Text } from "$/ui/text"; import { Stack, Fullscreen } from "$/ui/layout"; import { Header } from "./workspace/header"; +import { theme } from "$/ui/theme"; const HomeLink = styled(Link, { base: { @@ -30,7 +30,9 @@ export function NotFound(props: ErrorScreenProps) {
- + {props.message || "Page not found"} Go back home @@ -46,7 +48,9 @@ export function NotAllowed(props: ErrorScreenProps) {
- + Access not allowed diff --git a/packages/web/workspace/src/pages/workspace-create.tsx b/packages/web/workspace/src/pages/workspace-create.tsx index 4b55a58b..affdef55 100644 --- a/packages/web/workspace/src/pages/workspace-create.tsx +++ b/packages/web/workspace/src/pages/workspace-create.tsx @@ -1,14 +1,3 @@ -import { - Text, - Input, - theme, - Stack, - Button, - utility, - FormField, - Fullscreen, - AvatarInitialsIcon, -} from "$/ui"; import { styled } from "@macaron-css/solid"; import { createForm, getValue, setError, valiForm } from "@modular-forms/solid"; import { useNavigate } from "@solidjs/router"; @@ -17,6 +6,13 @@ import { minLength, object, string, regex } from "valibot"; import { Header } from "./workspace/header"; import { useAuth2 } from "$/providers/auth2"; import { Workspace } from "@console/core/workspace"; +import { utility } from "$/ui/utility"; +import { theme } from "$/ui/theme"; +import { Fullscreen, Stack } from "$/ui/layout"; +import { AvatarInitialsIcon } from "$/ui/avatar-icon"; +import { FormField, Input } from "$/ui/form"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const CreateWorkspaceHint = styled("ul", { base: { @@ -79,7 +75,7 @@ export function WorkspaceCreate() { onSubmit={async (data) => { console.log("submitting"); const result = await fetch( - import.meta.env.VITE_API_URL + "/rest/workspace", + import.meta.env.VITE_API_URL + "/account/workspace", { method: "POST", headers: { diff --git a/packages/web/workspace/src/pages/workspace/account.tsx b/packages/web/workspace/src/pages/workspace/account.tsx index 2b6866eb..6c327da3 100644 --- a/packages/web/workspace/src/pages/workspace/account.tsx +++ b/packages/web/workspace/src/pages/workspace/account.tsx @@ -1,14 +1,15 @@ -import { Button, Fullscreen, Row, Stack, Text, theme } from "$/ui"; import { AvatarInitialsIcon } from "$/ui/avatar-icon"; import { styled } from "@macaron-css/solid"; import { useWorkspace } from "./context"; import { utility } 
from "$/ui/utility"; -import { createId } from "@paralleldrive/cuid2"; -import { useReplicache } from "$/providers/replicache"; import { Link, useNavigate } from "@solidjs/router"; import { IconArrowsRightLeft } from "$/ui/icons"; import { IconAws } from "$/ui/icons/custom"; import { Header } from "./header"; +import { Fullscreen, Stack, Row } from "$/ui/layout"; +import { theme } from "$/ui/theme"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const AddAccountGraphicAwsIcon = styled("div", { base: { diff --git a/packages/web/workspace/src/pages/workspace/app/autodeploy/index.tsx b/packages/web/workspace/src/pages/workspace/app/autodeploy/index.tsx index 37b64165..a7b31c18 100644 --- a/packages/web/workspace/src/pages/workspace/app/autodeploy/index.tsx +++ b/packages/web/workspace/src/pages/workspace/app/autodeploy/index.tsx @@ -1,4 +1,3 @@ -import { Button, ButtonIcon, Row, Stack, TabTitle, theme, utility } from "$/ui"; import { useAppContext } from "../context"; import { Show } from "solid-js"; import { Link, Route, Routes } from "@solidjs/router"; @@ -16,6 +15,11 @@ import { } from "$/data/app"; import { DateTime } from "luxon"; import { IconGitHub } from "$/ui/icons/custom"; +import { TabTitle, ButtonIcon } from "$/ui/button"; +import { Row, Stack } from "$/ui/layout"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Button } from "$/ui/button"; const RepoLink = styled("a", { base: { @@ -72,19 +76,19 @@ export function Autodeploy() { .sort( (a, b) => DateTime.fromISO(b.time.created).toMillis() - - DateTime.fromISO(a.time.created).toMillis() + DateTime.fromISO(a.time.created).toMillis(), )[0]; const latestRunError = run?.status === "error"; const appRepo = await AppRepoStore.forApp(tx, ctx.app.id); const ghRepo = (await GithubRepoStore.all(tx)).find( - (repo) => repo.id === appRepo[0]?.repoID + (repo) => repo.id === appRepo[0]?.repoID, ); if (!ghRepo) return { latestRunError }; const ghRepoOrg = 
(await GithubOrgStore.all(tx)).find( - (org) => org.id === ghRepo.githubOrgID && !org.time.disconnected + (org) => org.id === ghRepo.githubOrgID && !org.time.disconnected, ); return { @@ -127,8 +131,9 @@ export function Autodeploy() { diff --git a/packages/web/workspace/src/pages/workspace/app/autodeploy/list.tsx b/packages/web/workspace/src/pages/workspace/app/autodeploy/list.tsx index b7e4c41c..cf4e87ce 100644 --- a/packages/web/workspace/src/pages/workspace/app/autodeploy/list.tsx +++ b/packages/web/workspace/src/pages/workspace/app/autodeploy/list.tsx @@ -4,7 +4,6 @@ import { Row } from "$/ui/layout"; import { Link } from "@solidjs/router"; import { styled } from "@macaron-css/solid"; import { useAppContext } from "../context"; -import { Stack, theme, utility } from "$/ui"; import { IconArrowLongRight, IconExclamationTriangle, @@ -26,6 +25,8 @@ import { import { RunStore } from "$/data/app"; import { StageStore } from "$/data/stage"; import { For, Show, Match, Switch, createMemo } from "solid-js"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; export function ERROR_MAP(error: Exclude) { switch (error.type) { @@ -454,15 +455,16 @@ function RunItem({ run }: { run: Run.Run }) { @@ -495,7 +497,7 @@ function RunItem({ run }: { run: Run.Run }) { —}> {formatSinceTime(DateTime.fromISO(run.time.created!).toSQL()!)} @@ -514,7 +516,7 @@ export function List() { return pipe( all, filter((run) => run.appID === ctx.app.id), - sortBy([(run) => run.time.created, "desc"]) + sortBy([(run) => run.time.created, "desc"]), ); }); diff --git a/packages/web/workspace/src/pages/workspace/app/overview.tsx b/packages/web/workspace/src/pages/workspace/app/overview.tsx index 3cbcb878..b78c260b 100644 --- a/packages/web/workspace/src/pages/workspace/app/overview.tsx +++ b/packages/web/workspace/src/pages/workspace/app/overview.tsx @@ -1,4 +1,3 @@ -import { Tag, Row, theme, Stack, utility, TabTitle } from "$/ui"; import { DateTime } from "luxon"; import { For, 
Show, Match, Switch } from "solid-js"; import { RunStore, StateUpdateStore } from "$/data/app"; @@ -16,6 +15,11 @@ import { AWS } from "$/data/aws"; import { githubCommit, githubRepo } from "$/common/url-builder"; import { sortBy } from "remeda"; import { IconTag } from "$/ui/icons"; +import { TabTitle } from "$/ui/button"; +import { Row, Stack } from "$/ui/layout"; +import { Tag } from "$/ui/tag"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; const Root = styled("div", { base: { @@ -217,7 +221,7 @@ export function Overview() { await ActiveStagesForApp(app.app.id)(tx), (stage) => app.app.name === local().app && stage.name === local().stage ? 0 : 1, - [(stage) => stage.timeUpdated, "desc"] + [(stage) => stage.timeUpdated, "desc"], ); }); const latestRunError = createSubscription(async (tx) => { @@ -227,7 +231,7 @@ export function Overview() { .sort( (a, b) => DateTime.fromISO(b.time.created).toMillis() - - DateTime.fromISO(a.time.created).toMillis() + DateTime.fromISO(a.time.created).toMillis(), )[0]; return run?.status === "error"; }); @@ -253,7 +257,7 @@ export function Overview() { }); const local = useLocalContext(); const aws = createSubscription(async (tx) => - AWS.AccountStore.get(tx, props.stage.awsAccountID) + AWS.AccountStore.get(tx, props.stage.awsAccountID), ); return ( @@ -310,7 +314,7 @@ export function Overview() { Updated {formatSinceTime(props.stage.timeUpdated, true)} diff --git a/packages/web/workspace/src/pages/workspace/app/settings/index.tsx b/packages/web/workspace/src/pages/workspace/app/settings/index.tsx index 402a6429..89b92387 100644 --- a/packages/web/workspace/src/pages/workspace/app/settings/index.tsx +++ b/packages/web/workspace/src/pages/workspace/app/settings/index.tsx @@ -5,21 +5,6 @@ import { GithubOrgStore, GithubRepoStore, } from "$/data/app"; -import { - theme, - utility, - Tag, - Row, - Text, - Stack, - Input, - TabTitle, - Grower, - Button, - FormField, - ButtonIcon, - LinkButton, -} from 
"$/ui"; import { Select } from "$/ui/select"; import { Dropdown } from "$/ui/dropdown"; import { @@ -66,6 +51,14 @@ import { array, minLength, object, optional, string } from "valibot"; import { AWS } from "$/data/aws"; import { createStore } from "solid-js/store"; import { fromEntries, map, pipe, sortBy, filter } from "remeda"; +import { LinkButton, TabTitle, ButtonIcon } from "$/ui/button"; +import { FormField, Input } from "$/ui/form"; +import { Grower, Row, Stack } from "$/ui/layout"; +import { Tag } from "$/ui/tag"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const HEADER_HEIGHT = 54; @@ -418,8 +411,8 @@ export const EditTargetForm = object({ object({ key: string([minLength(1, "Set the key of the variable")]), value: string([minLength(1, "Set the value of the variable")]), - }) - ) + }), + ), ), }); @@ -435,17 +428,17 @@ export function Settings() { const workspace = useWorkspace(); const runConfigs = createSubscription( (tx) => RunConfigStore.forApp(tx, app.app.id), - [] + [], ); const appRepo = createSubscription((tx) => - AppRepoStore.forApp(tx, app.app.id).then((repos) => repos[0]) + AppRepoStore.forApp(tx, app.app.id).then((repos) => repos[0]), ); const needsGithub = createSubscription(async (tx) => { const ghOrgs = await GithubOrgStore.all(tx); const appRepo = await AppRepoStore.forApp(tx, app.app.id).then( - (repos) => repos[0] + (repos) => repos[0], ); if (appRepo) { const ghRepo = await GithubRepoStore.get(tx, appRepo.repoID); @@ -469,7 +462,7 @@ export function Settings() { .sort( (a, b) => DateTime.fromISO(b.time.created).toMillis() - - DateTime.fromISO(a.time.created).toMillis() + DateTime.fromISO(a.time.created).toMillis(), )[0]; return run?.status === "error"; }); @@ -488,7 +481,7 @@ export function Settings() { "message", (e) => { if (e.data === "github.success") setOverrideGithub(true); - } + }, ); const [putForm, { Form, Field, 
FieldArray }] = createForm({ @@ -566,7 +559,7 @@ export function Settings() { onSelect={() => { if ( !confirm( - "Are you sure you want to remove this environment?" + "Are you sure you want to remove this environment?", ) ) return; @@ -598,7 +591,7 @@ export function Settings() { awsAccountExternalID: data.awsAccount, appID: app.app.id, env: fromEntries( - (data.env || []).map((item) => [item.key, item.value]) + (data.env || []).map((item) => [item.key, item.value]), ), }); setEditing("active", false); @@ -721,16 +714,16 @@ export function Settings() { onPaste={(e) => { const data = e.clipboardData?.getData( - "text/plain" + "text/plain", ); if (!data) return; setValue( putForm, `env.${index()}.value`, - data + data, ); e.currentTarget.value = "0".repeat( - data.length + data.length, ); e.preventDefault(); }} @@ -836,8 +829,8 @@ export function Settings() { new Set( orgs.value .filter((org) => !org.time.disconnected) - .map((org) => org.id) - ) + .map((org) => org.id), + ), ); const sortedRepos = createMemo(() => pipe( @@ -847,13 +840,13 @@ export function Settings() { label: repo.name, value: repo.id, })), - sortBy((repo) => repo.label) - ) + sortBy((repo) => repo.label), + ), ); const newRepo = createMemo(() => props.new === true); const empty = createMemo(() => sortedRepos().length === 0); const expanded = createMemo(() => - newRepo() ? !empty() && !!getValue(repoForm, "repo") : true + newRepo() ? 
!empty() && !!getValue(repoForm, "repo") : true, ); return ( @@ -1040,7 +1033,7 @@ export function Settings() { const info = createSubscription(async (tx) => { const repo = await GithubRepoStore.get( tx, - appRepo.value!.repoID + appRepo.value!.repoID, ); const org = await GithubOrgStore.get(tx, repo.githubOrgID); return { @@ -1066,7 +1059,7 @@ export function Settings() { target="_blank" href={githubRepo( info.value!.org.login, - info.value!.repo.name + info.value!.repo.name, )} > {info.value!.org.login} @@ -1086,12 +1079,12 @@ export function Settings() { onClick={() => { if ( !confirm( - "Are you sure you want to disconnect from this repo?" + "Are you sure you want to disconnect from this repo?", ) ) return; rep().mutate.app_repo_disconnect( - appRepo.value!.id + appRepo.value!.id, ); reset(repoForm, { initialValues: repoFormInitialValues, @@ -1142,7 +1135,7 @@ export function Settings() { val.stagePattern.length) + sortBy((val) => val.stagePattern.length), )} > {(config) => ( @@ -1184,7 +1177,7 @@ export function Settings() { - c.stagePattern.startsWith("pr-") + c.stagePattern.startsWith("pr-"), ) } > diff --git a/packages/web/workspace/src/pages/workspace/command-bar.tsx b/packages/web/workspace/src/pages/workspace/command-bar.tsx index dfbf788a..6401b6f0 100644 --- a/packages/web/workspace/src/pages/workspace/command-bar.tsx +++ b/packages/web/workspace/src/pages/workspace/command-bar.tsx @@ -1,18 +1,17 @@ import { - For, - JSX, - ParentProps, batch, createContext, createEffect, createMemo, createSignal, + For, + JSX, onCleanup, + ParentProps, useContext, } from "solid-js"; import { styled } from "@macaron-css/solid"; import { theme } from "$/ui/theme"; -import { Text } from "$/ui/text"; import { filter, groupBy, pipe } from "remeda"; import { globalStyle } from "@macaron-css/core"; import { Portal } from "solid-js/web"; @@ -23,8 +22,8 @@ import { import { createMutationObserver } from "@solid-primitives/mutation-observer"; import { utility } from 
"$/ui/utility"; import { IconSubRight } from "$/ui/icons/custom"; -import { Navigator, useLocation, useNavigate } from "@solidjs/router"; -import { useBus } from "sst/bus"; +import { Navigator, useLocation } from "@solidjs/router"; +import { Text } from "$/ui/text"; import { bus } from "$/providers/bus"; export interface Action { @@ -52,8 +51,7 @@ export function NavigationAction(input: { icon: input.icon || IconSubRight, title: input.title, category: input.category, - disabled: - input.disabled || + disabled: input.disabled || (input.path.startsWith("/") && (!input.prefix ? loc.pathname === input.path @@ -101,7 +99,6 @@ const Modal = styled("div", { borderRadius: 10, flexShrink: 0, boxShadow: theme.color.shadow.drop.long, - backdropFilter: "blur(10px)", background: theme.color.background.modal, // Safari doesn't redraw properly when the height // of the modal changes @@ -238,8 +235,8 @@ function createControl() { if (!visible()) return; const p = activeProviders().length ? activeProviders() - .map((p) => providers.find((e) => e.name === p)!) - .filter(Boolean) + .map((p) => providers.find((e) => e.name === p)!) 
+ .filter(Boolean) : [...providers.values()].reverse(); const actions = await Promise.all( p.map(async (entry) => { diff --git a/packages/web/workspace/src/pages/workspace/context.ts b/packages/web/workspace/src/pages/workspace/context.ts index b613347a..453767f5 100644 --- a/packages/web/workspace/src/pages/workspace/context.ts +++ b/packages/web/workspace/src/pages/workspace/context.ts @@ -1,4 +1,8 @@ +import { createInitializedContext } from "$/common/context"; +import { useAuth2 } from "$/providers/auth2"; import { Workspace } from "@console/core/workspace"; +import { app } from "@console/functions/api/api"; +import { hc } from "hono/client"; import { Accessor, createContext, useContext } from "solid-js"; export const WorkspaceContext = createContext>(); @@ -8,3 +12,21 @@ export function useWorkspace() { if (!context) throw new Error("No workspace context"); return context; } + +export const { use: useApi, provider: ApiProvider } = createInitializedContext( + "Api", + () => { + const auth = useAuth2(); + const workspace = useWorkspace(); + const client = hc(import.meta.env.VITE_API_URL, { + headers: { + Authorization: `Bearer ${auth.current.token}`, + "x-sst-workspace": workspace().id, + }, + }); + return { + client, + ready: true, + }; + }, +); diff --git a/packages/web/workspace/src/pages/workspace/header.tsx b/packages/web/workspace/src/pages/workspace/header.tsx index 5f0fb80b..9fbd02b3 100644 --- a/packages/web/workspace/src/pages/workspace/header.tsx +++ b/packages/web/workspace/src/pages/workspace/header.tsx @@ -1,6 +1,5 @@ import { CSSProperties } from "@macaron-css/core"; import { Workspace } from "@console/core/workspace"; -import { Row, Stack, AvatarInitialsIcon, Text, theme } from "$/ui"; import { IconApp } from "$/ui/icons/custom"; import { IconChevronUpDown, IconMagnifyingGlass } from "$/ui/icons"; import { utility } from "$/ui/utility"; @@ -22,6 +21,9 @@ import { } from "solid-js"; import { createInitializedContext } from "$/common/context"; 
import { dropAllDatabases } from "replicache"; +import { AvatarInitialsIcon } from "$/ui/avatar-icon"; +import { Row } from "$/ui/layout"; +import { theme } from "$/ui/theme"; const breadCrumbStyles: CSSProperties = { flexShrink: 0, @@ -297,7 +299,9 @@ export function Header(props: { app?: string; stage?: string }) { / - + {props.stage} bar.show("stage-switcher")}> diff --git a/packages/web/workspace/src/pages/workspace/index.tsx b/packages/web/workspace/src/pages/workspace/index.tsx index 467cf341..e265a0b7 100644 --- a/packages/web/workspace/src/pages/workspace/index.tsx +++ b/packages/web/workspace/src/pages/workspace/index.tsx @@ -12,7 +12,7 @@ import { User } from "./user"; import { Account } from "./account"; import { Settings } from "./settings"; import { Overview } from "./overview"; -import { WorkspaceContext } from "./context"; +import { ApiProvider, WorkspaceContext } from "./context"; import { AppStore } from "$/data/app"; import { IconApp, IconUserAdd, IconConnect } from "$/ui/icons/custom"; import { StageStore } from "$/data/stage"; @@ -79,7 +79,9 @@ export function Workspace() { workspace()!}> - + + + diff --git a/packages/web/workspace/src/pages/workspace/overview-next.tsx b/packages/web/workspace/src/pages/workspace/overview-next.tsx index 9d53d89c..edccef24 100644 --- a/packages/web/workspace/src/pages/workspace/overview-next.tsx +++ b/packages/web/workspace/src/pages/workspace/overview-next.tsx @@ -1,25 +1,10 @@ import { DateTime } from "luxon"; -import { - AppStore, - RunStore, - RepoFromApp, - StateUpdateStore, -} from "$/data/app"; +import { AppStore, RunStore, RepoFromApp, StateUpdateStore } from "$/data/app"; import { UserStore } from "$/data/user"; import { AccountStore } from "$/data/aws"; import { ActiveStages } from "$/data/stage"; import { createSubscription, useReplicache } from "$/providers/replicache"; -import { - theme, - utility, - Row, - Tag, - Text, - Stack, - Button, - TextButton, -} from "$/ui"; -import { Fullscreen } from 
"$/ui/layout"; +import { Fullscreen, Row, Stack } from "$/ui/layout"; import { Dropdown } from "$/ui/dropdown"; import { IconChevronRight, @@ -40,21 +25,17 @@ import { Link, useNavigate, useSearchParams } from "@solidjs/router"; import { For, Match, Show, Switch, createEffect, createMemo } from "solid-js"; import { Header } from "./header"; import { useLocalContext } from "$/providers/local"; -import { - filter, - flatMap, - groupBy, - map, - pipe, - sortBy, - entries, - nthBy, -} from "remeda"; +import { filter, flatMap, groupBy, map, pipe, sortBy, entries } from "remeda"; import { User } from "@console/core/user"; import { useAuth2 } from "$/providers/auth2"; import { parseTime, formatSinceTime, formatCommit } from "$/common/format"; import { githubCommit, githubRepo } from "$/common/url-builder"; -import { AppRepo } from "@console/core/app/repo"; +import { TextButton } from "$/ui/button"; +import { Tag } from "$/ui/tag"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const OVERFLOW_APPS_COUNT = 9; const OVERFLOW_APPS_DISPLAY = 6; @@ -269,7 +250,7 @@ function sortUsers(users: User.Info[], selfEmail: string): User.Info[] { users, (user) => (user.email === selfEmail ? 0 : 1), // Your own user (user) => (!user.timeSeen ? 
0 : 1), // Invites - (user) => user.email.length // Sort by length + (user) => user.email.length, // Sort by length ); } @@ -305,7 +286,7 @@ export function OverviewNext() { const accounts = AccountStore.list.watch( rep, () => [], - (accounts) => accounts.filter((a) => !a.timeDeleted) + (accounts) => accounts.filter((a) => !a.timeDeleted), ); const auth = useAuth2(); const local = useLocalContext(); @@ -325,14 +306,14 @@ export function OverviewNext() { pipe( stages(), filter((s) => s.appID === app.id), - sortBy((s) => s.timeUpdated) + sortBy((s) => s.timeUpdated), ).at(-1)?.timeUpdated || "", "desc", ], - (app) => app.name - ) - ) - ) + (app) => app.name, + ), + ), + ), ); const nav = useNavigate(); const selfEmail = createMemo(() => auth.current.email); @@ -340,12 +321,12 @@ export function OverviewNext() { const result = pipe( stages(), groupBy( - (s) => `${apps.value.find((a) => a.id === s.appID)?.name}/${s.name}` + (s) => `${apps.value.find((a) => a.id === s.appID)?.name}/${s.name}`, ), entries, filter(([, stages]) => stages.length > 1), flatMap(([_, stages]) => stages), - map((s) => s.id) + map((s) => s.id), ); return new Set(result); }); @@ -356,13 +337,13 @@ export function OverviewNext() { const sortedUsers = createMemo(() => sortUsers( users().filter((u) => !u.timeDeleted), - selfEmail() - ) + selfEmail(), + ), ); const usersCapped = createMemo(() => sortedUsers().length > OVERFLOW_USERS_COUNT ? sortedUsers().slice(0, OVERFLOW_USERS_DISPLAY) - : sortedUsers() + : sortedUsers(), ); createEffect(() => { @@ -396,14 +377,14 @@ export function OverviewNext() { (c) => props.app.name === local().app && c.name === local().stage ? 0 : 1, (c) => (c.unsupported ? 1 : 0), - [(c) => c.timeUpdated, "desc"] + [(c) => c.timeUpdated, "desc"], ); }); const repo = createSubscription(RepoFromApp(props.app.id)); const childrenCapped = createMemo(() => children().length > OVERFLOW_APPS_COUNT ? 
children().slice(0, OVERFLOW_APPS_DISPLAY) - : children() + : children(), ); const showOverflow = createMemo(() => { return showApps().includes(props.app.id); @@ -415,7 +396,7 @@ export function OverviewNext() { .sort( (a, b) => DateTime.fromISO(b.time.created).toMillis() - - DateTime.fromISO(a.time.created).toMillis() + DateTime.fromISO(a.time.created).toMillis(), )[0]; if (!run) return; return !run.stageID && run.status === "error"; @@ -424,7 +405,9 @@ export function OverviewNext() { - + + + {props.app.name} @@ -517,7 +500,10 @@ export function OverviewNext() { Autodeploy your apps with the Console{" "} - + Learn more @@ -791,7 +777,7 @@ function StageCard(props: StageCardProps) { const run = await RunStore.get( tx, props.stage.id, - latestUpdate.value.runID + latestUpdate.value.runID, ); if (run.trigger.source !== "github") return; const repoUrl = githubRepo(run.trigger.repo.owner, run.trigger.repo.repo); @@ -846,14 +832,25 @@ function StageCard(props: StageCardProps) { {props.stage.name} - + - Local + + Local + - - Error + + + Error + @@ -872,7 +869,7 @@ function StageCard(props: StageCardProps) { {props.stage.region} {formatSinceTime(props.stage.timeUpdated, false, true)} @@ -932,7 +929,7 @@ function UserCard(props: UserCardProps) { onSelect={() => { if ( !confirm( - "Are you sure you want to remove them from the workspace?" 
+ "Are you sure you want to remove them from the workspace?", ) ) return; diff --git a/packages/web/workspace/src/pages/workspace/overview.tsx b/packages/web/workspace/src/pages/workspace/overview.tsx index 316f47c1..fad72611 100644 --- a/packages/web/workspace/src/pages/workspace/overview.tsx +++ b/packages/web/workspace/src/pages/workspace/overview.tsx @@ -4,17 +4,7 @@ import { UserStore } from "$/data/user"; import { AccountStore } from "$/data/aws"; import { StageStore } from "$/data/stage"; import { useReplicache } from "$/providers/replicache"; -import { - theme, - utility, - Row, - Tag, - Text, - Stack, - Button, - TextButton, -} from "$/ui"; -import { Fullscreen } from "$/ui/layout"; +import { Fullscreen, Row, Stack } from "$/ui/layout"; import { Dropdown } from "$/ui/dropdown"; import { IconChevronRight, @@ -34,6 +24,12 @@ import { useLocalContext } from "$/providers/local"; import { filter, flatMap, groupBy, map, pipe, sortBy, entries } from "remeda"; import { User } from "@console/core/user"; import { useAuth2 } from "$/providers/auth2"; +import { TextButton } from "$/ui/button"; +import { Tag } from "$/ui/tag"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const OVERFLOW_APPS_COUNT = 9; const OVERFLOW_APPS_DISPLAY = 6; diff --git a/packages/web/workspace/src/pages/workspace/settings/alerts.tsx b/packages/web/workspace/src/pages/workspace/settings/alerts.tsx index b35cef35..975e617d 100644 --- a/packages/web/workspace/src/pages/workspace/settings/alerts.tsx +++ b/packages/web/workspace/src/pages/workspace/settings/alerts.tsx @@ -7,17 +7,6 @@ import { createSignal, createMemo, } from "solid-js"; -import { - Row, - Text, - Stack, - Input, - theme, - Button, - Grower, - FormField, - LinkButton, -} from "$/ui"; import { utility } from "$/ui/utility"; import { Dropdown } from "$/ui/dropdown"; import { @@ -53,6 +42,12 @@ import { createForm, } from 
"@modular-forms/solid"; import { WarningStore } from "$/data/warning"; +import { LinkButton } from "$/ui/button"; +import { FormField, Input } from "$/ui/form"; +import { Stack, Row, Grower } from "$/ui/layout"; +import { theme } from "$/ui/theme"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; const PANEL_CONTENT_SPACE = "10"; const PANEL_HEADER_SPACE = "3"; @@ -297,7 +292,7 @@ const PutForm = object({ }), event: union( [literal("issue"), literal("autodeploy"), literal("autodeploy.error")], - "Must select event" + "Must select event", ), }); @@ -305,7 +300,7 @@ export function Alerts() { const rep = useReplicache(); const users = createSubscription( async (tx) => UserStore.list(tx).then(filter((u) => !u.timeDeleted)), - [] + [], ); const alerts = createSubscription(async (tx) => { const ret = AlertStore.all(tx); @@ -335,7 +330,7 @@ export function Alerts() { formData.source?.app?.includes(app.name) ); }) - .map((app) => app.id) + .map((app) => app.id), ); const availableStages = createMemo(() => { @@ -343,7 +338,7 @@ export function Alerts() { stages(), filter((s) => selectedApps().includes(s.appID)), map((s) => s.name), - unique() + unique(), ); }); @@ -436,7 +431,7 @@ export function Alerts() { {joinWithAnd( - matchingStages().map(({ app, stage }) => `${app}/${stage}`) + matchingStages().map(({ app, stage }) => `${app}/${stage}`), )} @@ -470,7 +465,7 @@ export function Alerts() { color={field.error ? "danger" : "primary"} hint={ getValue(putForm, "destination.type") === "slack" && - !slackTeam() ? ( + !slackTeam() ? ( Connect your Slack workspace{" "} below. 
@@ -523,7 +518,7 @@ export function Alerts() { }, []); createComputed(() => - setValue(putForm, "source.app", value()) + setValue(putForm, "source.app", value()), ); return ( @@ -620,7 +615,7 @@ export function Alerts() { }, []); createComputed(() => - setValue(putForm, "destination.email.users", value()) + setValue(putForm, "destination.email.users", value()), ); return ( {(field, props) => ( @@ -755,19 +750,19 @@ export function Alerts() { destination: cloned.destination!.type === "slack" ? { - type: "slack", - properties: { - channel: cloned.destination!.slack?.channel!, - }, - } + type: "slack", + properties: { + channel: cloned.destination!.slack?.channel!, + }, + } : { - type: "email", - properties: { - users: cloned.destination!.email!.users!.includes("*") - ? "*" - : cloned.destination!.email!.users!, + type: "email", + properties: { + users: cloned.destination!.email!.users!.includes("*") + ? "*" + : cloned.destination!.email!.users!, + }, }, - }, event: cloned.event!, }); setEditing("active", false); @@ -789,13 +784,13 @@ export function Alerts() { Object.fromEntries( warnings .filter((w) => w.type === "issue_alert_slack") - .map((w) => [w.target, w] as const) - ) + .map((w) => [w.target, w] as const), + ), ); const slackTeam = SlackTeamStore.all.watch( rep, () => [], - (all) => all.at(0) + (all) => all.at(0), ); return ( @@ -823,7 +818,7 @@ export function Alerts() { {(alert) => { const isEditingRow = createMemo( - () => editing.active && editing.id === alert.id + () => editing.active && editing.id === alert.id, ); return ( @@ -891,7 +886,7 @@ export function Alerts() { alert.source.stage.join(", ")} {" "} {alert.source.app !== "*" && - alert.source.app.length === 1 + alert.source.app.length === 1 ? "stage" : "stages"} @@ -917,16 +912,16 @@ export function Alerts() { {destination().properties.users === "*" ? 
"To all users in the workspace" : ( - destination().properties - .users as string[] - ) - .map( - (id) => - users.value.find( - (u) => u.id === id - )?.email + destination().properties + .users as string[] ) - .join(", ")} + .map( + (id) => + users.value.find( + (u) => u.id === id, + )?.email, + ) + .join(", ")} )} diff --git a/packages/web/workspace/src/pages/workspace/settings/aws.tsx b/packages/web/workspace/src/pages/workspace/settings/aws.tsx index 9f59c416..9f5ff7d2 100644 --- a/packages/web/workspace/src/pages/workspace/settings/aws.tsx +++ b/packages/web/workspace/src/pages/workspace/settings/aws.tsx @@ -1,16 +1,20 @@ import { AccountStore } from "$/data/aws"; import { createSubscription, useReplicache } from "$/providers/replicache"; -import { theme, utility, Text, Row, Stack, Button, LinkButton } from "$/ui"; import { Dropdown } from "$/ui/dropdown"; import { IconEllipsisVertical, IconExclamationTriangle } from "$/ui/icons"; import { IconAws, IconAdd, IconArrowPathSpin } from "$/ui/icons/custom"; +import { Stack } from "$/ui/layout"; +import { theme } from "$/ui/theme"; +import { utility } from "$/ui/utility"; import { styled } from "@macaron-css/solid"; import { Link } from "@solidjs/router"; import { For, Show, Switch, Match } from "solid-js"; +import { Text } from "$/ui/text"; +import { Button } from "$/ui/button"; +import { DateTime } from "luxon"; const Root = styled("div", { - base: { - }, + base: {}, }); const Card = styled("div", { @@ -25,7 +29,7 @@ const CardLeft = styled("div", { base: { ...utility.row(3), alignItems: "center", - } + }, }); const CardTitle = styled("p", { @@ -57,7 +61,7 @@ const CardRight = styled("div", { base: { ...utility.row(5), alignItems: "center", - } + }, }); const CardActions = styled("div", { @@ -138,11 +142,14 @@ export function AWS() { - - - - }> + + + + } + > {(account) => ( @@ -151,9 +158,10 @@ export function AWS() { {account.accountID} - Connected - }> + Connected} + > Disconnected @@ -170,7 +178,9 @@ export 
function AWS() { - + @@ -185,7 +195,10 @@ export function AWS() {