diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml index b900e7bc5..8049b49f9 100644 --- a/.github/workflows/check.yaml +++ b/.github/workflows/check.yaml @@ -52,6 +52,19 @@ jobs: path: '*/*/dist/go-test.json' retention-days: 1 + cli-integration: + name: CLI Integration Tests + runs-on: ubuntu-latest + needs: check + steps: + - uses: actions/checkout@v4 + + - name: Setup runner environment + uses: ./.github/actions/setup + + - name: Run CLI integration tests + run: pnpm nx run cli:test:integration + go-test-summary: name: Test runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 30a81028f..059f1144a 100644 --- a/.gitignore +++ b/.gitignore @@ -28,6 +28,9 @@ storybook-static tmp/ tsconfig.tsbuildinfo tsp-output +.bench/ +.ralph/ +.ralphrc # Coverage files *.out diff --git a/apps/cli/cmd/flow.go b/apps/cli/cmd/flow.go index 1b627c671..3935bfeea 100644 --- a/apps/cli/cmd/flow.go +++ b/apps/cli/cmd/flow.go @@ -14,6 +14,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlitemem" "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/flowbuilder" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" @@ -153,7 +154,7 @@ var yamlflowRunCmd = &cobra.Command{ return fmt.Errorf("failed to convert YAML using v2: %w", err) } - resolver := resolver.NewStandardResolver( + httpResolver := resolver.NewStandardResolver( &services.HTTP, &services.HTTPHeader, services.HTTPSearchParam, @@ -163,6 +164,12 @@ var yamlflowRunCmd = &cobra.Command{ services.HTTPAssert, ) + graphqlResolver := gqlresolver.NewStandardResolver( + services.GraphQL.Reader(), + &services.GraphQLHeader, + &services.GraphQLAssert, + ) + // Create LLM provider 
factory for AI nodes llmFactory := scredential.NewLLMProviderFactory(&services.Credential) @@ -176,10 +183,14 @@ var yamlflowRunCmd = &cobra.Command{ &services.NodeAI, &services.NodeAiProvider, &services.NodeMemory, + &services.NodeGraphQL, + &services.GraphQL, + &services.GraphQLHeader, &services.Workspace, &services.Variable, &services.FlowVariable, - resolver, + httpResolver, + graphqlResolver, services.Logger, llmFactory, ) diff --git a/apps/cli/internal/common/services.go b/apps/cli/internal/common/services.go index 09780f11a..f2657757c 100644 --- a/apps/cli/internal/common/services.go +++ b/apps/cli/internal/common/services.go @@ -10,6 +10,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/scredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" ) @@ -40,6 +41,12 @@ type Services struct { NodeAI sflow.NodeAIService NodeAiProvider sflow.NodeAiProviderService NodeMemory sflow.NodeMemoryService + NodeGraphQL sflow.NodeGraphQLService + + // GraphQL + GraphQL sgraphql.GraphQLService + GraphQLHeader sgraphql.GraphQLHeaderService + GraphQLAssert sgraphql.GraphQLAssertService // Credentials Credential scredential.CredentialService @@ -87,6 +94,12 @@ func CreateServices(ctx context.Context, db *sql.DB, logger *slog.Logger) (*Serv NodeAI: sflow.NewNodeAIService(queries), NodeAiProvider: sflow.NewNodeAiProviderService(queries), NodeMemory: sflow.NewNodeMemoryService(queries), + NodeGraphQL: sflow.NewNodeGraphQLService(queries), + + // GraphQL + GraphQL: sgraphql.New(queries, logger), + GraphQLHeader: sgraphql.NewGraphQLHeaderService(queries), + GraphQLAssert: sgraphql.NewGraphQLAssertService(queries), // Credentials Credential: 
scredential.NewCredentialService(queries), diff --git a/apps/cli/internal/runner/runner.go b/apps/cli/internal/runner/runner.go index 4f06541d8..d4ae4e6b1 100644 --- a/apps/cli/internal/runner/runner.go +++ b/apps/cli/internal/runner/runner.go @@ -13,6 +13,7 @@ import ( "github.com/the-dev-tools/dev-tools/apps/cli/internal/reporter" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner" @@ -251,6 +252,17 @@ func RunFlow(ctx context.Context, flowPtr *mflow.Flow, services RunnerServices, }() defer close(requestRespChan) + // Initialize GraphQL response channel + gqlRespChan := make(chan ngraphql.NodeGraphQLSideResp, requestBufferSize) + go func() { + for resp := range gqlRespChan { + if resp.Done != nil { + close(resp.Done) + } + } + }() + defer close(gqlRespChan) + // Build flow node map using flowbuilder flowNodeMap, startNodeID, err := services.Builder.BuildNodes( ctx, @@ -259,6 +271,7 @@ func RunFlow(ctx context.Context, flowPtr *mflow.Flow, services RunnerServices, nodeTimeout, httpClient, requestRespChan, + gqlRespChan, services.JSClient, ) if err != nil { diff --git a/apps/cli/internal/runner/runner_test.go b/apps/cli/internal/runner/runner_test.go index beb1b7433..67271a7c2 100644 --- a/apps/cli/internal/runner/runner_test.go +++ b/apps/cli/internal/runner/runner_test.go @@ -113,10 +113,14 @@ func newFlowTestFixture(t *testing.T) *flowTestFixture { nil, // NodeAIService - not needed for CLI tests nil, // NodeAiProviderService - not needed for CLI tests nil, // NodeMemoryService - not needed for CLI tests + nil, // NodeGraphQLService - not needed for CLI tests + nil, // GraphQLService - not needed for CLI tests + nil, // GraphQLHeaderService - 
not needed for CLI tests &workspaceService, &varService, &flowVariableService, res, + nil, // GraphQLResolver - not needed for CLI tests logger, nil, // LLMProviderFactory - not needed for CLI tests ) diff --git a/apps/cli/project.json b/apps/cli/project.json index 40367f6e0..c00020fd5 100644 --- a/apps/cli/project.json +++ b/apps/cli/project.json @@ -60,6 +60,21 @@ }, "dependsOn": ["copy-worker"] }, + "test:integration": { + "executor": "nx:run-commands", + "options": { + "cwd": "{projectRoot}", + "parallel": false, + "commands": [ + "go build -tags cli -o dist/devtools-cli-test .", + "RUN_CLI_INTEGRATION=true DEVTOOLS_CLI_BIN=$(pwd)/dist/devtools-cli-test go test -tags cli_integration ./test/yamlflow/ -v -timeout 120s" + ], + "env": { + "DEVTOOLS_MODE": "cli" + } + }, + "dependsOn": ["copy-worker"] + }, "lint": { "executor": "nx:run-commands", "options": { diff --git a/apps/cli/test/yamlflow/example_run_yamlflow.yaml b/apps/cli/test/yamlflow/example_run_yamlflow.yaml index fa042ac46..7f1aa93b9 100644 --- a/apps/cli/test/yamlflow/example_run_yamlflow.yaml +++ b/apps/cli/test/yamlflow/example_run_yamlflow.yaml @@ -83,7 +83,7 @@ flows: depends_on: RequestB - if: name: CheckPostCount - condition: RequestB.response.body.length > 10 + condition: len(RequestB.response.body) > 10 then: LogManyPosts else: LogFewPosts depends_on: RequestB diff --git a/apps/cli/test/yamlflow/graphql_run_example.yaml b/apps/cli/test/yamlflow/graphql_run_example.yaml new file mode 100644 index 000000000..1dbc124f9 --- /dev/null +++ b/apps/cli/test/yamlflow/graphql_run_example.yaml @@ -0,0 +1,70 @@ +workspace_name: GraphQL Run Example +run: + - flow: QueryAndLookup +graphql_requests: + - name: ListCountries + url: https://countries.trevorblades.com/graphql + query: |- + query { + countries { + code + name + capital + } + } + variables: '{}' + assertions: + - response.status == 200 + - response.body.data.countries != nil + - name: GetCountry + url: https://countries.trevorblades.com/graphql + 
query: |- + query GetCountry($code: ID!) { + country(code: $code) { + name + capital + currency + languages { + name + } + } + } + variables: '{}' + assertions: + - response.status == 200 +flows: + - name: QueryAndLookup + steps: + - manual_start: + name: Start + position_x: 0 + position_y: 0 + - graphql: + name: ListCountries + depends_on: Start + position_x: 300 + position_y: 0 + use_request: ListCountries + - js: + name: PickCountry + depends_on: ListCountries + position_x: 600 + position_y: 0 + code: |- + export default function(ctx) { + const countries = ctx.ListCountries.response.body.data.countries; + const country = countries[0]; + return { code: country.code, name: country.name }; + } + - graphql: + name: GetCountry + depends_on: PickCountry + position_x: 900 + position_y: 0 + use_request: GetCountry + variables: '{"code": "{{PickCountry.code}}"}' + assertions: + - response.body.data.country.name != nil +environments: + - name: default + variables: {} diff --git a/apps/cli/test/yamlflow/integration_yamlflow_test.go b/apps/cli/test/yamlflow/integration_yamlflow_test.go new file mode 100644 index 000000000..95d09ed26 --- /dev/null +++ b/apps/cli/test/yamlflow/integration_yamlflow_test.go @@ -0,0 +1,78 @@ +//go:build cli_integration + +package yamlflow_test + +import ( + "os" + "os/exec" + "path/filepath" + "testing" +) + +// TestMain builds the CLI binary once for all tests in this package. +// If DEVTOOLS_CLI_BIN is already set, it skips the build step. 
+func TestMain(m *testing.M) { + if os.Getenv("RUN_CLI_INTEGRATION") != "true" { + os.Exit(0) + } + + binPath := os.Getenv("DEVTOOLS_CLI_BIN") + cleanUp := false + if binPath == "" { + // Build CLI binary with cli tag + binPath = filepath.Join(os.TempDir(), "devtools-cli-test") + cmd := exec.Command("go", "build", "-tags", "cli", "-o", binPath, "../../.") + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + panic("failed to build CLI binary: " + err.Error()) + } + os.Setenv("DEVTOOLS_CLI_BIN", binPath) + cleanUp = true + } + + code := m.Run() + + if cleanUp { + os.Remove(binPath) + } + os.Exit(code) +} + +func runCLI(t *testing.T, yamlFile string) { + t.Helper() + + binPath := os.Getenv("DEVTOOLS_CLI_BIN") + if binPath == "" { + t.Fatal("DEVTOOLS_CLI_BIN not set") + } + + cmd := exec.Command(binPath, "flow", "run", yamlFile) + cmd.Env = append(os.Environ(), "DEVTOOLS_MODE=cli") + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("CLI failed for %s:\n%s", filepath.Base(yamlFile), string(out)) + } + + t.Logf("CLI output for %s:\n%s", filepath.Base(yamlFile), string(out)) +} + +func TestYAMLFlow_SimpleRun(t *testing.T) { + runCLI(t, "simple_run_example.yaml") +} + +func TestYAMLFlow_MultiFlowRun(t *testing.T) { + runCLI(t, "multi_flow_run_example.yaml") +} + +func TestYAMLFlow_ExampleRun(t *testing.T) { + runCLI(t, "example_run_yamlflow.yaml") +} + +func TestYAMLFlow_TestRunField(t *testing.T) { + runCLI(t, "test_run_field.yaml") +} + +func TestYAMLFlow_GraphQLRun(t *testing.T) { + runCLI(t, "graphql_run_example.yaml") +} diff --git a/packages/client/src/app/router/route-tree.gen.ts b/packages/client/src/app/router/route-tree.gen.ts index a03b4f0dc..6cd100d81 100644 --- a/packages/client/src/app/router/route-tree.gen.ts +++ b/packages/client/src/app/router/route-tree.gen.ts @@ -15,12 +15,15 @@ import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDot import { Route as 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteImport } from './../../pages/workspace/routes/workspace/$workspaceIdCan/route' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRouteImport } from './../../pages/workspace/routes/workspace/$workspaceIdCan/index' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteImport } from './../../pages/http/routes/http/$httpIdCan/route' +import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/route' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/route' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRouteImport } from './../../pages/http/routes/http/$httpIdCan/index' +import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/index' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/index' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRouteImport } from './../../pages/credential/routes/credential/$credentialIdCan/index' import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRouteImport } from './../../pages/flow/routes/flow/$flowIdCan/history' import { Route as 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRouteImport } from './../../pages/http/routes/http/$httpIdCan/delta.$deltaHttpIdCan' +import { Route as dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport } from './../../pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan' const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRoute = dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRouteImport.update({ @@ -70,6 +73,15 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport.update( + { + id: '/(graphql)/graphql/$graphqlIdCan', + path: '/graphql/$graphqlIdCan', + getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute, + } as any, + ) const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute = dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteImport.update( { @@ -88,6 +100,15 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport.update( + { + id: '/', + path: '/', + 
getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute, + } as any, + ) const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute = dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRouteImport.update( { @@ -124,6 +145,15 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute, } as any, ) +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport.update( + { + id: '/delta/$deltaGraphqlIdCan', + path: '/delta/$deltaGraphqlIdCan', + getParentRoute: () => + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute, + } as any, + ) export interface FileRoutesByFullPath { '/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesIndexRoute @@ -132,11 +162,14 @@ export interface FileRoutesByFullPath { '/workspace/$workspaceIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/http/$httpIdCan': typeof 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren '/workspace/$workspaceIdCan/flow/$flowIdCan/history': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute '/workspace/$workspaceIdCan/credential/$credentialIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/workspace/$workspaceIdCan/http/$httpIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRoutesByTo { @@ -147,7 +180,9 @@ export interface FileRoutesByTo { '/workspace/$workspaceIdCan/flow/$flowIdCan/history': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute '/workspace/$workspaceIdCan/credential/$credentialIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/workspace/$workspaceIdCan/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan': typeof 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/workspace/$workspaceIdCan/http/$httpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRoutesById { @@ -158,11 +193,14 @@ export interface FileRoutesById { '/(dashboard)/(workspace)/workspace/$workspaceIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteWithChildren '/(dashboard)/(workspace)/workspace/$workspaceIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanHistoryRoute 
'/(dashboard)/(workspace)/workspace/$workspaceIdCan/(credential)/credential/$credentialIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanIndexRoute + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/delta/$deltaHttpIdCan': typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute } export interface FileRouteTypes { @@ -174,11 +212,14 @@ export interface FileRouteTypes { | '/workspace/$workspaceIdCan' | '/workspace/$workspaceIdCan/' | '/workspace/$workspaceIdCan/flow/$flowIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan/history' | '/workspace/$workspaceIdCan/credential/$credentialIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan/' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/' | '/workspace/$workspaceIdCan/http/$httpIdCan/' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan' fileRoutesByTo: 
FileRoutesByTo to: @@ -189,7 +230,9 @@ export interface FileRouteTypes { | '/workspace/$workspaceIdCan/flow/$flowIdCan/history' | '/workspace/$workspaceIdCan/credential/$credentialIdCan' | '/workspace/$workspaceIdCan/flow/$flowIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan' + | '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/workspace/$workspaceIdCan/http/$httpIdCan/delta/$deltaHttpIdCan' id: | '__root__' @@ -199,11 +242,14 @@ export interface FileRouteTypes { | '/(dashboard)/(workspace)/workspace/$workspaceIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan' + | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(credential)/credential/$credentialIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/' + | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/' + | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' | '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/delta/$deltaHttpIdCan' fileRoutesById: FileRoutesById } @@ -258,6 +304,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteImport parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute } + 
'/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan' + path: '/graphql/$graphqlIdCan' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRoute + } '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan': { id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan' path: '/flow/$flowIdCan' @@ -272,6 +325,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRouteImport parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute } + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/' + path: '/' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute + } '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/': { id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/' path: '/' @@ -300,6 +360,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRouteImport parentRoute: typeof 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute } + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan': { + id: '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' + path: '/delta/$deltaGraphqlIdCan' + fullPath: '/workspace/$workspaceIdCan/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan' + preLoaderRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRouteImport + parentRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute + } } } @@ -321,6 +388,24 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteChildren, ) +interface dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren { + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute +} + +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren = + { + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute: + 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanIndexRoute, + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute: + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanDeltaDotdeltaGraphqlIdCanRoute, + } + +const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren = + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute._addFileChildren( + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteChildren, + ) + interface dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteChildren { dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanIndexRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanDeltaDotdeltaHttpIdCanRoute @@ -342,6 +427,7 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoute interface dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanRouteRouteChildren { dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute: typeof 
dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute: typeof dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute } @@ -352,6 +438,8 @@ const dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspace dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotWorkspaceRoutesWorkspaceWorkspaceIdCanIndexRoute, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRoute: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotFlowRoutesFlowFlowIdCanRouteRouteWithChildren, + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRoute: + dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotGraphqlRoutesGraphqlGraphqlIdCanRouteRouteWithChildren, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRoute: dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotHttpRoutesHttpHttpIdCanRouteRouteWithChildren, dashboardDotDotDotDotDotDotDotDotPagesDashboardRoutesDotDotDotDotCredentialRoutesCredentialCredentialIdCanIndexRoute: diff --git a/packages/client/src/features/agent/context-builder.ts b/packages/client/src/features/agent/context-builder.ts index 899df1fbd..fc314bbf0 100644 --- 
a/packages/client/src/features/agent/context-builder.ts +++ b/packages/client/src/features/agent/context-builder.ts @@ -8,6 +8,7 @@ import { FlowVariableCollectionSchema, NodeCollectionSchema, NodeExecutionCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; import { HttpCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/http'; @@ -20,6 +21,7 @@ const NODE_KIND_NAMES: Record = { [NodeKind.CONDITION]: 'Condition', [NodeKind.FOR]: 'For', [NodeKind.FOR_EACH]: 'ForEach', + [NodeKind.GRAPH_Q_L]: 'GraphQL', [NodeKind.HTTP]: 'HTTP', [NodeKind.JS]: 'JavaScript', [NodeKind.MANUAL_START]: 'ManualStart', @@ -54,6 +56,7 @@ export const useFlowContext = (flowId: Uint8Array): FlowContextData => { const variableCollection = useApiCollection(FlowVariableCollectionSchema); const executionCollection = useApiCollection(NodeExecutionCollectionSchema); const nodeHttpCollection = useApiCollection(NodeHttpCollectionSchema); + const nodeGraphqlCollection = useApiCollection(NodeGraphQLCollectionSchema); const httpCollection = useApiCollection(HttpCollectionSchema); const { data: nodesData } = useLiveQuery( @@ -94,6 +97,19 @@ export const useFlowContext = (flowId: Uint8Array): FlowContextData => { .map((nh) => [Ulid.construct(nh.nodeId).toCanonical(), Ulid.construct(nh.httpId).toCanonical()]), ); + // Get all nodeGraphql mappings for GraphQL nodes + const { data: nodeGraphqlData } = useLiveQuery( + (_) => _.from({ nodeGql: nodeGraphqlCollection }), + [nodeGraphqlCollection], + ); + + // Build a map of nodeId -> graphqlId for quick lookup + const nodeGraphqlMap = new Map( + (nodeGraphqlData ?? 
[]) + .filter((ng) => ng.nodeId != null && ng.graphqlId != null) + .map((ng) => [Ulid.construct(ng.nodeId).toCanonical(), Ulid.construct(ng.graphqlId).toCanonical()]), + ); + // Get all HTTP requests to fetch their methods const { data: httpData } = useLiveQuery((_) => _.from({ http: httpCollection }), [httpCollection]); @@ -110,7 +126,9 @@ export const useFlowContext = (flowId: Uint8Array): FlowContextData => { const nodeIdStr = Ulid.construct(n.nodeId).toCanonical(); const httpId = n.kind === NodeKind.HTTP ? nodeHttpMap.get(nodeIdStr) : undefined; const httpMethod = httpId ? httpMethodMap.get(httpId) : undefined; + const graphqlId = n.kind === NodeKind.GRAPH_Q_L ? nodeGraphqlMap.get(nodeIdStr) : undefined; return { + graphqlId, httpId, httpMethod, id: nodeIdStr, @@ -177,6 +195,7 @@ interface FlowCollections { executionCollection: ReturnType>; httpCollection: ReturnType>; nodeCollection: ReturnType>; + nodeGraphqlCollection: ReturnType>; nodeHttpCollection: ReturnType>; variableCollection: ReturnType>; } @@ -195,6 +214,7 @@ export const refreshFlowContext = async ( executionCollection, httpCollection, nodeCollection, + nodeGraphqlCollection, nodeHttpCollection, variableCollection, } = collections; @@ -227,6 +247,13 @@ export const refreshFlowContext = async ( .map((nh) => [Ulid.construct(nh.nodeId).toCanonical(), Ulid.construct(nh.httpId).toCanonical()]), ); + const nodeGraphqlData = await queryCollection((_) => _.from({ nodeGql: nodeGraphqlCollection })); + const nodeGraphqlMap = new Map( + nodeGraphqlData + .filter((ng) => ng.nodeId != null && ng.graphqlId != null) + .map((ng) => [Ulid.construct(ng.nodeId).toCanonical(), Ulid.construct(ng.graphqlId).toCanonical()]), + ); + const httpData = await queryCollection((_) => _.from({ http: httpCollection })); const httpMethodMap = new Map( httpData @@ -240,7 +267,9 @@ export const refreshFlowContext = async ( const nodeIdStr = Ulid.construct(n.nodeId).toCanonical(); const httpId = n.kind === NodeKind.HTTP ? 
nodeHttpMap.get(nodeIdStr) : undefined; const httpMethod = httpId ? httpMethodMap.get(httpId) : undefined; + const graphqlId = n.kind === NodeKind.GRAPH_Q_L ? nodeGraphqlMap.get(nodeIdStr) : undefined; return { + graphqlId, httpId, httpMethod, id: nodeIdStr, @@ -380,7 +409,7 @@ const buildXmlFlowBlock = (context: FlowContextData): string => { // 4. Compute endpoint set (sequential nodes with no outgoing edges) const endpointSet = new Set( context.nodes - .filter((n) => ['HTTP', 'JavaScript', 'ManualStart'].includes(n.kind) && !outgoingEdges.has(n.id)) + .filter((n) => ['GraphQL', 'HTTP', 'JavaScript', 'ManualStart'].includes(n.kind) && !outgoingEdges.has(n.id)) .map((n) => n.id), ); @@ -451,7 +480,7 @@ const buildXmlCompactSummary = (context: FlowContextData): string => { // Find endpoint nodes const outgoing = new Set(context.edges.map((e) => e.sourceId)); const endpoints = context.nodes.filter( - (n) => ['HTTP', 'JavaScript', 'ManualStart'].includes(n.kind) && !outgoing.has(n.id), + (n) => ['GraphQL', 'HTTP', 'JavaScript', 'ManualStart'].includes(n.kind) && !outgoing.has(n.id), ); const lines: string[] = [``]; @@ -505,13 +534,13 @@ ${buildXmlFlowBlock(context)} IMPORTANT RULES: 1. To find the start node, look for a node with type "ManualStart". 2. When connecting nodes, use the node IDs from the workflow XML. -3. Node outputs are stored by node name. In JS code use ctx["NodeName"]. HTTP nodes output { response: { status, body }, request }. ForEach nodes expose { item, key } during iteration. In HTTP fields use {{NodeName.response.body.field}} interpolation — see . +3. Node outputs are stored by node name. In JS code use ctx["NodeName"]. HTTP nodes output { response: { status, body }, request }. GraphQL nodes output { response: { status, body, headers, duration }, request: { url, query, variables, headers } }. ForEach nodes expose { item, key } during iteration. In HTTP/GraphQL fields use {{NodeName.response.body.field}} interpolation — see . 4. 
A node can connect to multiple targets for parallel execution (all branches run and complete before downstream nodes continue). To run steps sequentially, chain them: Start → A → B → C. Only create Condition nodes when "then" and "else" lead to DIFFERENT destinations — if both go to the same node, skip the Condition. 5. ALWAYS use connectChain for ALL connections — sequential, branching (auto-applies "then"), fan-out, and fan-in. Examples: ["A","B"] single, ["A","B","C"] chain, ["A",["B","C"],"D"] fan-out/fan-in, [["B","C"],"D"] fan-in only. Pass sourceHandle: "else" or "loop" for non-default branches. Use edge id attributes from \`\` elements when calling disconnectNodes. 6. Always confirm what you did after executing tools. 7. If a node has state="Failure", use inspectNode to get detailed error and config information. 8. Use inspectNode with includeOutput: true to see the input/output data of a node's most recent execution. -9. Use updateNode to modify any node's configuration — condition expressions, loop iterations/paths, JS code, HTTP settings, or node names. Provide only the fields to change. Arrays (headers, searchParams, assertions) replace the full existing set. +9. Use updateNode to modify any node's configuration — condition expressions, loop iterations/paths, JS code, HTTP settings, GraphQL settings, or node names. Provide only the fields to change. Arrays (headers, searchParams, assertions) replace the full existing set. 10. Nodes with selected="true" are currently selected on canvas — prefer operating on those nodes unless the user specifies otherwise. 11. Nodes with endpoint="true" are the last in their chain — new nodes connect there. 12. Nodes with orphan="true" are mistakes — they must be connected to the flow via connectChain. @@ -519,10 +548,10 @@ IMPORTANT RULES: 14. For multi-phase flows, use SEPARATE connectChain calls per phase with a shared fan-in node. 
Example: ["Start",["GET1","GET2"],"ProcessData"] then ["ProcessData",["POST1","POST2"],"End"]. NEVER use consecutive nested arrays — split them across calls. 15. NEVER delete a node to work around an error. If a node fails or cannot be configured with available tools, explain the problem to the user and suggest what they need to do manually. Deleting user-requested nodes and replacing them with a different type is not allowed unless the user explicitly asks for it. 16. AI nodes require a connected AI Provider node that supplies the LLM model and credentials. The agent cannot create or configure AI Provider nodes — this must be done by the user on the canvas. If an AI node fails with a provider-related error, tell the user they need to add and connect an AI Provider node to it with the appropriate credentials. -17. Use patchHttpNode to add or remove individual headers, query params, or assertions without affecting the rest. Use updateNode only when you want to replace the entire set. +17. Use patchHttpNode to add or remove individual headers, query params, or assertions on HTTP nodes without affecting the rest. Use patchGraphqlNode for the same on GraphQL nodes. Use updateNode only when you want to replace the entire set. -All text fields in HTTP nodes (url, headers, body, query params) support {{}} interpolation. +All text fields in HTTP nodes (url, headers, body, query params) and GraphQL nodes (url, query, variables, headers) support {{}} interpolation. The server resolves these at runtime — use variable references, not hardcoded values. Syntax: @@ -543,7 +572,7 @@ Node names use underscores for spaces: "Get User" → Get_User in references. -Assertions use expr-lang syntax (NOT JavaScript). They are evaluated server-side against the HTTP response. +Assertions use expr-lang syntax (NOT JavaScript). They are evaluated server-side against the HTTP/GraphQL response. 
Available variables (ONLY these exist — do NOT invent others): - response.status (int), response.body (parsed JSON object/array/string), response.headers (map), response.duration (int ms) diff --git a/packages/client/src/features/agent/tool-executor.ts b/packages/client/src/features/agent/tool-executor.ts index 459df7b99..deee375e3 100644 --- a/packages/client/src/features/agent/tool-executor.ts +++ b/packages/client/src/features/agent/tool-executor.ts @@ -63,6 +63,9 @@ interface Collections { fileCollection: CollectionData; forCollection: { utils: CollectionUtils }; forEachCollection: { utils: CollectionUtils }; + graphqlAssertCollection: { utils: CollectionUtils }; + graphqlCollection: { utils: CollectionUtils }; + graphqlHeaderCollection: { utils: CollectionUtils }; httpAssertCollection: { utils: CollectionUtils }; httpBodyRawCollection: { utils: CollectionUtils }; httpCollection: { utils: CollectionUtils }; @@ -70,6 +73,7 @@ interface Collections { httpSearchParamCollection: { utils: CollectionUtils }; jsCollection: { utils: CollectionUtils }; nodeCollection: { utils: CollectionUtils }; + nodeGraphqlCollection: { utils: CollectionUtils }; nodeHttpCollection: { utils: CollectionUtils }; variableCollection: { utils: CollectionUtils }; } @@ -107,6 +111,7 @@ const NODE_KIND_NAMES: Record = { [NodeKind.CONDITION]: 'Condition', [NodeKind.FOR]: 'For', [NodeKind.FOR_EACH]: 'ForEach', + [NodeKind.GRAPH_Q_L]: 'GraphQL', [NodeKind.HTTP]: 'HTTP', [NodeKind.JS]: 'JavaScript', [NodeKind.MANUAL_START]: 'ManualStart', @@ -605,6 +610,55 @@ const executeToolInternal = async ( } } + case 'createGraphQLNode': { + const nodeId = Ulid.generate().bytes; + const position = (args.position as { x: number; y: number }) ?? { x: 0, y: 0 }; + const nodeName = normalizeNodeName(args.name as string); + const url = (args.url as string) ?? ''; + const query = (args.query as string) ?? ''; + const variables = (args.variables as string) ?? 
''; + + const graphqlId = Ulid.generate().bytes; + const graphqlIdStr = Ulid.construct(graphqlId).toCanonical(); + + const insertPromises: Promise[] = [ + collections.graphqlCollection.utils.insert({ + graphqlId, + name: nodeName, + query, + url, + variables, + }), + getNextAgentFileOrder(fileCollection, workspaceId).then((order) => + fileCollection.utils.insert({ + fileId: graphqlId, + kind: FileKind.GRAPH_Q_L, + order, + workspaceId, + }), + ), + nodeCollection.utils.insert({ + flowId, + kind: NodeKind.GRAPH_Q_L, + name: nodeName, + nodeId, + position, + }), + collections.nodeGraphqlCollection.utils.insert({ + graphqlId, + nodeId, + }), + ]; + + await Promise.all(insertPromises); + + { + const canonicalId = Ulid.construct(nodeId).toCanonical(); + context.sessionCreatedNodeIds.add(canonicalId); + return { graphqlId: graphqlIdStr, name: nodeName, nodeId: canonicalId }; + } + } + case 'createJsNode': { const nodeId = Ulid.generate().bytes; const position = (args.position as { x: number; y: number }) ?? { x: 0, y: 0 }; @@ -839,6 +893,41 @@ const executeToolInternal = async ( result.errorHandling = feData?.errorHandling === 1 ? 'break' : 'continue'; break; } + case 'GraphQL': { + if (!node.graphqlId) break; + const graphqlIdBytes = parseUlid(node.graphqlId); + + const [graphqlData] = await queryCollection((_) => + _.from({ gql: collections.graphqlCollection }) + .where((_) => eq(_.gql.graphqlId, graphqlIdBytes)) + .findOne(), + ); + + const gqlHeaders = await queryCollection((_) => + _.from({ h: collections.graphqlHeaderCollection }).where((_) => eq(_.h.graphqlId, graphqlIdBytes)), + ); + + const gqlAsserts = await queryCollection((_) => + _.from({ a: collections.graphqlAssertCollection }).where((_) => eq(_.a.graphqlId, graphqlIdBytes)), + ); + + result.graphqlId = node.graphqlId; + result.url = graphqlData?.url ?? ''; + result.query = graphqlData?.query ?? ''; + result.variables = graphqlData?.variables ?? 
''; + result.headers = gqlHeaders.map((h) => ({ + enabled: h.enabled, + id: h.graphqlHeaderId ? Ulid.construct(h.graphqlHeaderId).toCanonical() : undefined, + key: h.key, + value: h.value, + })); + result.assertions = gqlAsserts.map((a) => ({ + enabled: a.enabled, + id: a.graphqlAssertId ? Ulid.construct(a.graphqlAssertId).toCanonical() : undefined, + value: a.value, + })); + break; + } case 'HTTP': { if (!node.httpId) break; const httpIdBytes = parseUlid(node.httpId); @@ -955,6 +1044,122 @@ const executeToolInternal = async ( return result; } + case 'patchGraphqlNode': { + const nodeIdStr = args.nodeId as string; + const node = flowContext.nodes.find((n) => n.id === nodeIdStr); + if (!node) throw new Error(`Node not found: ${nodeIdStr}`); + if (node.kind !== 'GraphQL') throw new Error(`patchGraphqlNode only works on GraphQL nodes, got: ${node.kind}`); + if (!node.graphqlId) throw new Error(`GraphQL node "${node.name}" has no associated GraphQL request`); + + const graphqlIdBytes = parseUlid(node.graphqlId); + const patchedFields: string[] = []; + const warnings: string[] = []; + + // --- Remove headers --- + const removeHeaderIds = args.removeHeaderIds as string[] | undefined; + const addHeaders = args.addHeaders as + | undefined + | { description?: string; enabled?: boolean; key: string; value?: string }[]; + + if (removeHeaderIds?.length) { + const existingHeaders = await queryCollection((_) => + _.from({ h: collections.graphqlHeaderCollection }).where((_) => eq(_.h.graphqlId, graphqlIdBytes)), + ); + const existingHeaderIds = new Set( + existingHeaders + .filter((h) => h.graphqlHeaderId != null) + .map((h) => Ulid.construct(h.graphqlHeaderId).toCanonical()), + ); + let removedCount = 0; + for (const id of removeHeaderIds) { + if (!existingHeaderIds.has(id)) continue; + collections.graphqlHeaderCollection.utils.delete({ graphqlHeaderId: parseUlid(id) }); + removedCount++; + } + if (removedCount > 0) { + patchedFields.push(`removedHeaders(${removedCount})`); + } 
+ const skippedCount = removeHeaderIds.length - removedCount; + if (skippedCount > 0) { + warnings.push(`Skipped ${skippedCount} header ID(s) not belonging to this GraphQL node.`); + } + } + + // --- Add headers --- + if (addHeaders?.length) { + const existingHeaders = await queryCollection((_) => + _.from({ h: collections.graphqlHeaderCollection }).where((_) => eq(_.h.graphqlId, graphqlIdBytes)), + ); + const maxOrder = existingHeaders.reduce((max, h) => Math.max(max, h.order ?? -1), -1); + let nextOrder = maxOrder + 1; + for (const h of addHeaders) { + await collections.graphqlHeaderCollection.utils.insert({ + description: h.description ?? '', + enabled: h.enabled ?? true, + graphqlHeaderId: Ulid.generate().bytes, + graphqlId: graphqlIdBytes, + key: h.key, + order: nextOrder++, + value: h.value ?? '', + }); + } + patchedFields.push(`addedHeaders(${addHeaders.length})`); + } + + // --- Remove assertions --- + const removeAssertionIds = args.removeAssertionIds as string[] | undefined; + const addAssertions = args.addAssertions as undefined | { enabled?: boolean; value: string }[]; + + if (removeAssertionIds?.length) { + const existingAssertions = await queryCollection((_) => + _.from({ a: collections.graphqlAssertCollection }).where((_) => eq(_.a.graphqlId, graphqlIdBytes)), + ); + const existingAssertionIds = new Set( + existingAssertions + .filter((a) => a.graphqlAssertId != null) + .map((a) => Ulid.construct(a.graphqlAssertId).toCanonical()), + ); + let removedCount = 0; + for (const id of removeAssertionIds) { + if (!existingAssertionIds.has(id)) continue; + collections.graphqlAssertCollection.utils.delete({ graphqlAssertId: parseUlid(id) }); + removedCount++; + } + if (removedCount > 0) { + patchedFields.push(`removedAssertions(${removedCount})`); + } + const skippedCount = removeAssertionIds.length - removedCount; + if (skippedCount > 0) { + warnings.push(`Skipped ${skippedCount} assertion ID(s) not belonging to this GraphQL node.`); + } + } + + // --- Add 
assertions --- + if (addAssertions?.length) { + const existingAssertions = await queryCollection((_) => + _.from({ a: collections.graphqlAssertCollection }).where((_) => eq(_.a.graphqlId, graphqlIdBytes)), + ); + const maxOrder = existingAssertions.reduce((max, a) => Math.max(max, a.order ?? -1), -1); + let nextOrder = maxOrder + 1; + for (const a of addAssertions) { + await collections.graphqlAssertCollection.utils.insert({ + enabled: a.enabled ?? true, + graphqlAssertId: Ulid.generate().bytes, + graphqlId: graphqlIdBytes, + order: nextOrder++, + value: normalizeConditionSyntax(a.value), + }); + } + patchedFields.push(`addedAssertions(${addAssertions.length})`); + } + + if (patchedFields.length === 0) { + return { message: 'No patch operations provided', success: false }; + } + + return { patchedFields, success: true, warnings: warnings.length > 0 ? warnings : undefined }; + } + case 'patchHttpNode': { const nodeIdStr = args.nodeId as string; const node = flowContext.nodes.find((n) => n.id === nodeIdStr); @@ -1220,6 +1425,87 @@ const executeToolInternal = async ( if (hasFeUpdates) forEachCollection.utils.update(feUpdates); break; } + case 'GraphQL': { + if (!node.graphqlId) throw new Error(`GraphQL node "${node.name}" has no associated GraphQL request`); + const graphqlIdBytes = parseUlid(node.graphqlId); + + // Update url/query/variables + const gqlUpdates: Record = { graphqlId: graphqlIdBytes }; + let hasGqlUpdates = false; + + if (args.url !== undefined) { + gqlUpdates.url = args.url; + hasGqlUpdates = true; + updatedFields.push('url'); + } + if (args.query !== undefined) { + gqlUpdates.query = args.query; + hasGqlUpdates = true; + updatedFields.push('query'); + } + if (args.variables !== undefined) { + gqlUpdates.variables = args.variables; + hasGqlUpdates = true; + updatedFields.push('variables'); + } + if (hasGqlUpdates) { + collections.graphqlCollection.utils.update(gqlUpdates); + } + + // Replace headers if provided + if (args.headers !== undefined) { + 
const existingHeaders = await queryCollection((_) => + _.from({ h: collections.graphqlHeaderCollection }).where((_) => eq(_.h.graphqlId, graphqlIdBytes)), + ); + for (const h of existingHeaders) { + if (h.graphqlHeaderId) + collections.graphqlHeaderCollection.utils.delete({ graphqlHeaderId: h.graphqlHeaderId }); + } + const newHeaders = args.headers as { + description?: string; + enabled?: boolean; + key: string; + value?: string; + }[]; + for (let i = 0; i < newHeaders.length; i++) { + const h = newHeaders[i]!; + await collections.graphqlHeaderCollection.utils.insert({ + description: h.description ?? '', + enabled: h.enabled ?? true, + graphqlHeaderId: Ulid.generate().bytes, + graphqlId: graphqlIdBytes, + key: h.key, + order: i, + value: h.value ?? '', + }); + } + updatedFields.push('headers'); + } + + // Replace assertions if provided + if (args.assertions !== undefined) { + const existingAsserts = await queryCollection((_) => + _.from({ a: collections.graphqlAssertCollection }).where((_) => eq(_.a.graphqlId, graphqlIdBytes)), + ); + for (const a of existingAsserts) { + if (a.graphqlAssertId) + collections.graphqlAssertCollection.utils.delete({ graphqlAssertId: a.graphqlAssertId }); + } + const newAsserts = args.assertions as { enabled?: boolean; value: string }[]; + for (let i = 0; i < newAsserts.length; i++) { + const a = newAsserts[i]!; + await collections.graphqlAssertCollection.utils.insert({ + enabled: a.enabled ?? 
true, + graphqlAssertId: Ulid.generate().bytes, + graphqlId: graphqlIdBytes, + order: i, + value: normalizeConditionSyntax(a.value), + }); + } + updatedFields.push('assertions'); + } + break; + } case 'HTTP': { if (!node.httpId) throw new Error(`HTTP node "${node.name}" has no associated HTTP request`); const httpIdBytes = parseUlid(node.httpId); diff --git a/packages/client/src/features/agent/tool-schemas.ts b/packages/client/src/features/agent/tool-schemas.ts index 03f100398..634ae929b 100644 --- a/packages/client/src/features/agent/tool-schemas.ts +++ b/packages/client/src/features/agent/tool-schemas.ts @@ -126,6 +126,33 @@ if (createHttpNodeDef) { } } +// Patch CreateGraphQLNode to add field descriptions +const createGraphQLNodeDef = mutationSchemas.find((t) => t.name === 'createGraphQLNode'); +if (createGraphQLNodeDef) { + const params = createGraphQLNodeDef.parameters as { + properties: Record; + }; + if (params.properties['url']) { + params.properties['url'] = { + ...(params.properties['url'] as object), + description: 'The GraphQL API endpoint URL. Supports {{variable}} interpolation, e.g. {{BASE_URL}}/graphql', + }; + } + if (params.properties['query']) { + params.properties['query'] = { + ...(params.properties['query'] as object), + description: 'The GraphQL query or mutation string. Example: query { users { id name } }', + }; + } + if (params.properties['variables']) { + params.properties['variables'] = { + ...(params.properties['variables'] as object), + description: + 'JSON string of GraphQL variables. Supports {{variable}} interpolation. 
Example: {"userId": "{{user_id}}"}', + }; + } +} + /** All tool schemas combined - ready for AI tool calling */ export const allToolSchemas = [...executionSchemas, ...mutationSchemas]; diff --git a/packages/client/src/features/agent/types.ts b/packages/client/src/features/agent/types.ts index 388c4125f..68675bb3a 100644 --- a/packages/client/src/features/agent/types.ts +++ b/packages/client/src/features/agent/types.ts @@ -41,6 +41,7 @@ export interface FlowContextData { } export interface NodeInfo { + graphqlId?: string; httpId?: string; httpMethod?: string; id: string; diff --git a/packages/client/src/features/agent/use-agent-chat.ts b/packages/client/src/features/agent/use-agent-chat.ts index c5201ecf0..f194147bd 100644 --- a/packages/client/src/features/agent/use-agent-chat.ts +++ b/packages/client/src/features/agent/use-agent-chat.ts @@ -15,9 +15,15 @@ import { NodeExecutionCollectionSchema, NodeForCollectionSchema, NodeForEachCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, NodeJsCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { + GraphQLAssertCollectionSchema, + GraphQLCollectionSchema, + GraphQLHeaderCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; import { HttpAssertCollectionSchema, HttpBodyRawCollectionSchema, @@ -202,6 +208,7 @@ const NODE_KIND_NAMES: Record = { [NodeKind.CONDITION]: 'Condition', [NodeKind.FOR]: 'For', [NodeKind.FOR_EACH]: 'ForEach', + [NodeKind.GRAPH_Q_L]: 'GraphQL', [NodeKind.HTTP]: 'HTTP', [NodeKind.JS]: 'JavaScript', [NodeKind.MANUAL_START]: 'ManualStart', @@ -266,7 +273,7 @@ const applyLayoutToFlow = async ( const clientToolSchemas: ToolSchema[] = [ { description: - "Inspect a node's full config and execution state. Returns type-specific config (HTTP: url/method/headers/params/body/assertions, JS: code, Condition: expression, For: iterations/condition, ForEach: path/condition) plus execution state/error. 
" + + "Inspect a node's full config and execution state. Returns type-specific config (HTTP: url/method/headers/params/body/assertions, GraphQL: url/query/variables/headers/assertions, JS: code, Condition: expression, For: iterations/condition, ForEach: path/condition) plus execution state/error. " + 'Set includeOutput: true to also get execution input/output payloads (can be large).', name: 'inspectNode', parameters: { @@ -299,19 +306,20 @@ const clientToolSchemas: ToolSchema[] = [ 'Base fields (name) work on any node. Type-specific fields: ' + 'Ai: prompt, maxIterations. Condition: condition. For: iterations, condition (break), errorHandling. ' + 'ForEach: path, condition (break), errorHandling. JS: code. ' + - 'HTTP: method, url, headers, searchParams, body, assertions (arrays replace existing set).', + 'HTTP: method, url, headers, searchParams, body, assertions (arrays replace existing set). ' + + 'GraphQL: url, query, variables, headers, assertions (arrays replace existing set).', name: 'updateNode', parameters: { additionalProperties: false, properties: { assertions: { - description: 'Replaces all existing assertions (HTTP only)', + description: 'Replaces all existing assertions (HTTP and GraphQL)', items: { properties: { enabled: { type: 'boolean' }, value: { description: - 'Expr-lang boolean expression evaluated against the HTTP response. Must be a complete expression, not a bare identifier. Available: response.status (int), response.body (parsed JSON), response.headers (map), response.duration. Examples: response.status == 200, response.body != nil, response.body.id != nil, len(response.body) > 0, response.headers["Content-Type"] contains "json"', + 'Expr-lang boolean expression evaluated against the HTTP/GraphQL response. Must be a complete expression, not a bare identifier. Available: response.status (int), response.body (parsed JSON), response.headers (map), response.duration. For GraphQL: data.* and errors.* also available. 
Examples: response.status == 200, response.body != nil, data.users[0].id != nil', type: 'string', }, }, @@ -340,7 +348,7 @@ const clientToolSchemas: ToolSchema[] = [ type: 'string', }, headers: { - description: 'Replaces all existing headers (HTTP only)', + description: 'Replaces all existing headers (HTTP and GraphQL)', items: { properties: { enabled: { type: 'boolean' }, @@ -381,6 +389,10 @@ const clientToolSchemas: ToolSchema[] = [ description: 'The prompt or system instructions for the AI agent (Ai nodes only)', type: 'string', }, + query: { + description: 'GraphQL query or mutation string (GraphQL nodes only)', + type: 'string', + }, searchParams: { description: 'Replaces all existing query parameters (HTTP only)', items: { @@ -396,7 +408,11 @@ const clientToolSchemas: ToolSchema[] = [ }, url: { description: - 'Request URL (HTTP nodes only). Supports {{variable}} interpolation, e.g. {{BASE_URL}}/api/users/{{id}}', + 'Request URL (HTTP and GraphQL nodes). Supports {{variable}} interpolation, e.g. {{BASE_URL}}/api/users/{{id}}', + type: 'string', + }, + variables: { + description: 'JSON string of GraphQL variables (GraphQL nodes only). Supports {{variable}} interpolation.', type: 'string', }, }, @@ -477,6 +493,60 @@ const clientToolSchemas: ToolSchema[] = [ type: 'object', }, }, + { + description: + 'Incrementally add or remove headers or assertions on a GraphQL node without replacing the full set. ' + + 'Use this when modifying individual items. For full replacement, use updateNode instead.', + name: 'patchGraphqlNode', + parameters: { + additionalProperties: false, + properties: { + addAssertions: { + description: 'Assertions to append', + items: { + properties: { + enabled: { type: 'boolean' }, + value: { + description: + 'Expr-lang boolean expression evaluated against the GraphQL response. Available: response.status, response.body, data.*, errors.*. 
Examples: response.status == 200, data.users != nil, len(data.users) > 0', + type: 'string', + }, + }, + required: ['value'], + type: 'object', + }, + type: 'array', + }, + addHeaders: { + description: 'Headers to append. Supports {{variable}} interpolation in values.', + items: { + properties: { + description: { type: 'string' }, + enabled: { type: 'boolean' }, + key: { type: 'string' }, + value: { description: 'Supports {{variable}} interpolation', type: 'string' }, + }, + required: ['key'], + type: 'object', + }, + type: 'array', + }, + nodeId: { description: 'The GraphQL node ID to patch', type: 'string' }, + removeAssertionIds: { + description: 'IDs of assertions to remove (get IDs from inspectNode)', + items: { type: 'string' }, + type: 'array', + }, + removeHeaderIds: { + description: 'IDs of headers to remove (get IDs from inspectNode)', + items: { type: 'string' }, + type: 'array', + }, + }, + required: ['nodeId'], + type: 'object', + }, + }, { description: 'PREFERRED tool for ALL node connections. Connects nodes into a chain with optional parallel fan-out. 
' + @@ -628,6 +698,10 @@ export const useAgentChat = ({ apiKey, flowId, provider, selectedNodeIds }: UseA const httpHeaderCollection = useApiCollection(HttpHeaderCollectionSchema); const httpBodyRawCollection = useApiCollection(HttpBodyRawCollectionSchema); const httpAssertCollection = useApiCollection(HttpAssertCollectionSchema); + const nodeGraphqlCollection = useApiCollection(NodeGraphQLCollectionSchema); + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + const graphqlHeaderCollection = useApiCollection(GraphQLHeaderCollectionSchema); + const graphqlAssertCollection = useApiCollection(GraphQLAssertCollectionSchema); const executionCollection = useApiCollection(NodeExecutionCollectionSchema); const fileCollection = useApiCollection(FileCollectionSchema); const flowCollection = useApiCollection(FlowCollectionSchema); @@ -656,6 +730,9 @@ export const useAgentChat = ({ apiKey, flowId, provider, selectedNodeIds }: UseA fileCollection, forCollection, forEachCollection, + graphqlAssertCollection, + graphqlCollection, + graphqlHeaderCollection, httpAssertCollection, httpBodyRawCollection, httpCollection, @@ -663,6 +740,7 @@ export const useAgentChat = ({ apiKey, flowId, provider, selectedNodeIds }: UseA httpSearchParamCollection, jsCollection, nodeCollection, + nodeGraphqlCollection, nodeHttpCollection, variableCollection, }; @@ -812,6 +890,7 @@ export const useAgentChat = ({ apiKey, flowId, provider, selectedNodeIds }: UseA executionCollection, httpCollection, nodeCollection, + nodeGraphqlCollection, nodeHttpCollection, variableCollection, })), @@ -1027,11 +1106,15 @@ export const useAgentChat = ({ apiKey, flowId, provider, selectedNodeIds }: UseA forCollection, forEachCollection, nodeHttpCollection, + nodeGraphqlCollection, httpCollection, httpSearchParamCollection, httpHeaderCollection, httpBodyRawCollection, httpAssertCollection, + graphqlCollection, + graphqlHeaderCollection, + graphqlAssertCollection, executionCollection, fileCollection, 
flowCollection, diff --git a/packages/client/src/features/file-system/index.tsx b/packages/client/src/features/file-system/index.tsx index d52daeccd..72bb20001 100644 --- a/packages/client/src/features/file-system/index.tsx +++ b/packages/client/src/features/file-system/index.tsx @@ -29,6 +29,7 @@ import { FolderSchema, } from '@the-dev-tools/spec/buf/api/file_system/v1/file_system_pb'; import { FlowSchema, FlowService } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { GraphQLSchema as GraphQLItemSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; import { HttpDeltaSchema, HttpMethod, HttpSchema, HttpService } from '@the-dev-tools/spec/buf/api/http/v1/http_pb'; import { CredentialAnthropicCollectionSchema, @@ -38,6 +39,7 @@ import { } from '@the-dev-tools/spec/tanstack-db/v1/api/credential'; import { FileCollectionSchema, FolderCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/file_system'; import { FlowCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; import { HttpCollectionSchema, HttpDeltaCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/http'; import { Button } from '@the-dev-tools/ui/button'; import { FlowsIcon, FolderOpenedIcon } from '@the-dev-tools/ui/icons'; @@ -84,6 +86,7 @@ export const FileCreateMenu = ({ parentFolderId, ...props }: FileCreateMenuProps const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); const folderCollection = useApiCollection(FolderCollectionSchema); + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); const httpCollection = useApiCollection(HttpCollectionSchema); const flowCollection = useApiCollection(FlowCollectionSchema); @@ -116,6 +119,22 @@ export const FileCreateMenu = ({ parentFolderId, ...props }: FileCreateMenuProps HTTP request + { + const graphqlUlid = Ulid.generate(); + graphqlCollection.utils.insert({ 
graphqlId: graphqlUlid.bytes, name: 'New GraphQL request' }); + await insertFile({ fileId: graphqlUlid.bytes, kind: FileKind.GRAPH_Q_L }); + if (toNavigate) + await navigate({ + from: router.routesById[routes.dashboard.workspace.route.id].fullPath, + params: { graphqlIdCan: graphqlUlid.toCanonical() }, + to: router.routesById[routes.dashboard.workspace.graphql.route.id].fullPath, + }); + }} + > + GraphQL request + + { const flowUlid = Ulid.generate(); @@ -332,6 +351,7 @@ const FileItem = ({ id }: FileItemProps) => { Match.when(FileKind.HTTP, () => ), Match.when(FileKind.HTTP_DELTA, () => ), Match.when(FileKind.FLOW, () => ), + Match.when(FileKind.GRAPH_Q_L, () => ), Match.when(FileKind.CREDENTIAL, () => ), Match.orElse(() => null), ); @@ -884,6 +904,93 @@ const FlowFile = ({ id }: FileItemProps) => { return toNavigate ? : ; }; +const GraphQLFile = ({ id }: FileItemProps) => { + const router = useRouter(); + const matchRoute = useMatchRoute(); + + const fileCollection = useApiCollection(FileCollectionSchema); + + const { fileId: graphqlId } = useMemo(() => fileCollection.utils.parseKeyUnsafe(id), [fileCollection.utils, id]); + + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + + const { name } = + useLiveQuery( + (_) => + _.from({ item: graphqlCollection }) + .where((_) => eq(_.item.graphqlId, graphqlId)) + .select((_) => pick(_.item, 'name')) + .findOne(), + [graphqlCollection, graphqlId], + ).data ?? 
create(GraphQLItemSchema); + + const { containerRef, navigate: toNavigate = false, showControls } = useContext(FileTreeContext); + + const { escapeRef, escapeRender } = useEscapePortal(containerRef); + + const { edit, isEditing, textFieldProps } = useEditableTextState({ + onSuccess: (_) => graphqlCollection.utils.update({ graphqlId, name: _ }), + value: name, + }); + + const { menuProps, menuTriggerProps, onContextMenu } = useContextMenuState(); + + const route = { + from: router.routesById[routes.dashboard.workspace.route.id].fullPath, + params: { graphqlIdCan: Ulid.construct(graphqlId).toCanonical() }, + to: router.routesById[routes.dashboard.workspace.graphql.route.id].fullPath, + } satisfies ToOptions; + + const content = ( + <> + GQL + + + {name} + + + {isEditing && + escapeRender( + , + )} + + {showControls && ( + + + + + void edit()}>Rename + + pipe(fileCollection.utils.parseKeyUnsafe(id), (_) => fileCollection.utils.delete(_))} + variant='danger' + > + Delete + + + + )} + + ); + + const props = { + children: content, + className: toNavigate && matchRoute(route) !== false ? tw`bg-neutral` : '', + id, + onContextMenu, + textValue: name, + } satisfies TreeItemProps; + + return toNavigate ? 
: ; +}; + const CredentialFile = ({ id }: FileItemProps) => { const router = useRouter(); const matchRoute = useMatchRoute(); diff --git a/packages/client/src/pages/flow/add-node.tsx b/packages/client/src/pages/flow/add-node.tsx index 70d70db11..5a0a644dd 100644 --- a/packages/client/src/pages/flow/add-node.tsx +++ b/packages/client/src/pages/flow/add-node.tsx @@ -6,7 +6,12 @@ import * as RAC from 'react-aria-components'; import { FiArrowLeft, FiBriefcase, FiChevronRight, FiTerminal, FiX } from 'react-icons/fi'; import { TbRobotFace } from 'react-icons/tb'; import { FileKind } from '@the-dev-tools/spec/buf/api/file_system/v1/file_system_pb'; -import { HandleKind, NodeHttpInsertSchema, NodeKind } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { + HandleKind, + NodeGraphQLInsertSchema, + NodeHttpInsertSchema, + NodeKind, +} from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; import { HttpMethod } from '@the-dev-tools/spec/buf/api/http/v1/http_pb'; import { FileCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/file_system'; import { @@ -16,9 +21,11 @@ import { NodeConditionCollectionSchema, NodeForCollectionSchema, NodeForEachCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, NodeJsCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; import { HttpCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/http'; import { Button } from '@the-dev-tools/ui/button'; import { FlowsIcon, ForIcon, IfIcon, SendRequestIcon } from '@the-dev-tools/ui/icons'; @@ -247,6 +254,13 @@ const AddCoreNodeSidebar = (props: AddNodeSidebarProps) => { onAction={() => void setSidebar?.((_) => )} title='HTTP Request' /> + + } + onAction={() => void setSidebar?.((_) => )} + title='GraphQL Request' + /> ); @@ -318,6 +332,69 @@ const AddHttpRequestNodeSidebar = ({ handleKind, position, previous, sourceId, t ); }; +const 
AddGraphQLRequestNodeSidebar = ({ handleKind, position, previous, sourceId, targetId }: AddNodeSidebarProps) => { + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + + const insertNode = useInsertNode(); + + const fileCollection = useApiCollection(FileCollectionSchema); + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + return ( + <> + + +
+ +
+ + { + const nodeId = Ulid.generate().bytes; + const data: MessageInitShape = { nodeId }; + + const file = fileCollection.get(key.toString())!; + + if (file.kind === FileKind.GRAPH_Q_L) { + data.graphqlId = file.fileId; + } else { + return; + } + + nodeGraphQLCollection.utils.insert(data); + insertNode({ handleKind, kind: NodeKind.GRAPH_Q_L, name: 'graphql', nodeId, position, sourceId, targetId }); + }} + showControls + /> + + ); +}; + const AddAiNode = ({ handleKind, position, sourceId, targetId }: AddNodeSidebarProps) => { const insertNode = useInsertNode(); diff --git a/packages/client/src/pages/flow/edit.tsx b/packages/client/src/pages/flow/edit.tsx index ba87f2347..fc58004c7 100644 --- a/packages/client/src/pages/flow/edit.tsx +++ b/packages/client/src/pages/flow/edit.tsx @@ -33,6 +33,7 @@ import { FlowCollectionSchema, FlowVariableCollectionSchema, NodeCollectionSchema, + NodeGraphQLCollectionSchema, NodeHttpCollectionSchema, } from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; import { Button, ButtonAsRouteLink } from '@the-dev-tools/ui/button'; @@ -78,6 +79,7 @@ import { import { ConditionNode, ConditionSettings } from './nodes/condition'; import { ForNode, ForSettings } from './nodes/for'; import { ForEachNode, ForEachSettings } from './nodes/for-each'; +import { GraphQLNode, GraphQLSettings } from './nodes/graphql'; import { HttpNode, HttpSettings } from './nodes/http'; import { JavaScriptNode, JavaScriptSettings } from './nodes/javascript'; import { ManualStartNode } from './nodes/manual-start'; @@ -92,6 +94,7 @@ export const nodeTypes: XF.NodeTypes = { [NodeKind.CONDITION]: ConditionNode, [NodeKind.FOR]: ForNode, [NodeKind.FOR_EACH]: ForEachNode, + [NodeKind.GRAPH_Q_L]: GraphQLNode, [NodeKind.HTTP]: HttpNode, [NodeKind.JS]: JavaScriptNode, [NodeKind.MANUAL_START]: ManualStartNode, @@ -148,6 +151,7 @@ export const Flow = ({ children }: PropsWithChildren) => { const flowCollection = useApiCollection(FlowCollectionSchema); const edgeCollection = 
useApiCollection(EdgeCollectionSchema); const nodeCollection = useApiCollection(NodeCollectionSchema); + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); const nodeHttpCollection = useApiCollection(NodeHttpCollectionSchema); const nodeEditDialog = useNodeEditDialog(); @@ -440,6 +444,23 @@ export const Flow = ({ children }: PropsWithChildren) => { position, }); } + + if (file?.kind === FileKind.GRAPH_Q_L) { + const nodeId = Ulid.generate().bytes; + + nodeGraphQLCollection.utils.insert({ + graphqlId: file.fileId, + nodeId, + }); + + nodeCollection.utils.insert({ + flowId, + kind: NodeKind.GRAPH_Q_L, + name: `graphql_${getNodes().length}`, + nodeId, + position, + }); + } }, ref, }); @@ -856,6 +877,7 @@ const useNodeEditDialog = () => { Match.when({ kind: NodeKind.FOR }, (_) => ), Match.when({ kind: NodeKind.JS }, (_) => ), Match.when({ kind: NodeKind.HTTP }, (_) => ), + Match.when({ kind: NodeKind.GRAPH_Q_L }, (_) => ), Match.when({ kind: NodeKind.AI }, (_) => ), Match.when({ kind: NodeKind.AI_PROVIDER }, (_) => ), Match.when({ kind: NodeKind.AI_MEMORY }, (_) => ), diff --git a/packages/client/src/pages/flow/nodes/graphql.tsx b/packages/client/src/pages/flow/nodes/graphql.tsx new file mode 100644 index 000000000..2df2b96bd --- /dev/null +++ b/packages/client/src/pages/flow/nodes/graphql.tsx @@ -0,0 +1,162 @@ +import { create } from '@bufbuild/protobuf'; +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { useRouter } from '@tanstack/react-router'; +import * as XF from '@xyflow/react'; +import { Ulid } from 'id128'; +import { use } from 'react'; +import { FiExternalLink } from 'react-icons/fi'; +import { NodeGraphQLSchema } from '@the-dev-tools/spec/buf/api/flow/v1/flow_pb'; +import { + NodeExecutionCollectionSchema, + NodeGraphQLCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/flow'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; 
+import { ButtonAsLink } from '@the-dev-tools/ui/button'; +import { SendRequestIcon } from '@the-dev-tools/ui/icons'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useDeltaState } from '~/features/delta'; +import { ReferenceContext } from '~/features/expression'; +import { GraphQLRequestPanel, GraphQLResponseInfo, GraphQLResponsePanel, GraphQLUrl } from '~/pages/graphql/@x/flow'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { FlowContext } from '../context'; +import { Handle } from '../handle'; +import { NodeSettingsBody, NodeSettingsOutputProps, NodeSettingsProps, SimpleNode } from '../node'; + +const defaultNodeGraphQL = create(NodeGraphQLSchema); + +export const GraphQLNode = ({ id, selected }: XF.NodeProps) => { + const nodeId = Ulid.fromCanonical(id).bytes; + + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + const { deltaGraphqlId, graphqlId } = + useLiveQuery( + (_) => + _.from({ item: nodeGraphQLCollection }) + .where((_) => eq(_.item.nodeId, nodeId)) + .select((_) => pick(_.item, 'graphqlId', 'deltaGraphqlId')) + .findOne(), + [nodeGraphQLCollection, nodeId], + ).data ?? defaultNodeGraphQL; + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + }; + + const [name] = useDeltaState({ ...deltaOptions, valueKey: 'name' }); + + return ( + + + + + } + icon={} + nodeId={nodeId} + selected={selected} + title='GraphQL' + > +
+
GQL
+
{name}
+
+
+ ); +}; + +export const GraphQLSettings = ({ nodeId }: NodeSettingsProps) => { + const router = useRouter(); + + const { isReadOnly = false } = use(FlowContext); + + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + const { workspaceIdCan } = routes.dashboard.workspace.route.useParams(); + + const nodeGraphQLCollection = useApiCollection(NodeGraphQLCollectionSchema); + + const { deltaGraphqlId, graphqlId } = + useLiveQuery( + (_) => + _.from({ item: nodeGraphQLCollection }) + .where((_) => eq(_.item.nodeId, nodeId)) + .select((_) => pick(_.item, 'graphqlId', 'deltaGraphqlId')) + .findOne(), + [nodeGraphQLCollection, nodeId], + ).data ?? defaultNodeGraphQL; + + return ( + } + settingsHeader={ + + + Open GraphQL + + } + title='GraphQL request' + > + + + + + + + ); +}; + +const Output = ({ nodeExecutionId }: NodeSettingsOutputProps) => { + const collection = useApiCollection(NodeExecutionCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.nodeExecutionId, nodeExecutionId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .findOne(), + [collection, nodeExecutionId], + ).data ?? {}; + + if (!graphqlResponseId) return null; + + return ( +
+ + +
+ ); +}; diff --git a/packages/client/src/pages/graphql/@x/flow.tsx b/packages/client/src/pages/graphql/@x/flow.tsx new file mode 100644 index 000000000..04f37f467 --- /dev/null +++ b/packages/client/src/pages/graphql/@x/flow.tsx @@ -0,0 +1,3 @@ +export { GraphQLRequestPanel } from '../request/panel'; +export { GraphQLUrl } from '../request/url'; +export { GraphQLResponseInfo, GraphQLResponsePanel } from '../response'; diff --git a/packages/client/src/pages/graphql/@x/workspace.tsx b/packages/client/src/pages/graphql/@x/workspace.tsx new file mode 100644 index 000000000..13825d610 --- /dev/null +++ b/packages/client/src/pages/graphql/@x/workspace.tsx @@ -0,0 +1,3 @@ +import { resolveRoutesTo } from '../../../shared/lib/router'; + +export const resolveRoutesFrom = resolveRoutesTo(import.meta.dirname, '../routes'); diff --git a/packages/client/src/pages/graphql/history.tsx b/packages/client/src/pages/graphql/history.tsx new file mode 100644 index 000000000..292add948 --- /dev/null +++ b/packages/client/src/pages/graphql/history.tsx @@ -0,0 +1,159 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { Suspense } from 'react'; +import { Collection, Dialog, Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { Panel, Group as PanelGroup, useDefaultLayout } from 'react-resizable-panels'; +import { twJoin } from 'tailwind-merge'; +import { + GraphQLResponseCollectionSchema, + GraphQLVersionCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Modal } from '@the-dev-tools/ui/modal'; +import { PanelResizeHandle } from '@the-dev-tools/ui/resizable-panel'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { GraphQLRequestPanel } from './request/panel'; +import { GraphQLUrl } from './request/url'; +import { GraphQLResponseInfo, 
GraphQLResponsePanel } from './response'; + +export interface HistoryModalProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; +} + +export const HistoryModal = ({ deltaGraphqlId, graphqlId }: HistoryModalProps) => { + 'use no memo'; + + const collection = useApiCollection(GraphQLVersionCollectionSchema); + + const { data: versions } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlId, deltaGraphqlId ?? graphqlId)) + .orderBy((_) => _.item.graphqlVersionId, 'desc'), + [collection, deltaGraphqlId, graphqlId], + ); + + return ( + + + +
+
+
Response History
+
History of your GraphQL responses
+
+
+
+
+
+
+
+
+
+ +
Current Version
+ +
+
+
+
+
+ +
+ {versions.length} previous responses +
+ +
+ + + {(_) => ( + + twJoin( + tw` + flex cursor-pointer items-center gap-1.5 rounded-md px-3 py-1.5 text-md leading-5 + font-semibold text-on-neutral + `, + isSelected && tw`bg-neutral`, + ) + } + id={collection.utils.getKey(_)} + > + {Ulid.construct(_.graphqlVersionId).time.toLocaleString()} + + )} + +
+
+ +
+ + {(_) => ( + + + +
+ } + > + + + + )} + +
+ +
+
+ ); +}; + +interface VersionProps { + graphqlId: Uint8Array; +} + +const Version = ({ graphqlId }: VersionProps) => { + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlId, graphqlId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .orderBy((_) => _.item.graphqlResponseId, 'desc') + .limit(1) + .findOne(), + [responseCollection, graphqlId], + ).data ?? {}; + + const endpointVersionsLayout = useDefaultLayout({ id: 'endpoint-versions' }); + + return ( + + +
+ +
+ + +
+ + {graphqlResponseId && ( + <> + + + + + + + + + )} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/page.tsx b/packages/client/src/pages/graphql/page.tsx new file mode 100644 index 000000000..4c9bc5c0c --- /dev/null +++ b/packages/client/src/pages/graphql/page.tsx @@ -0,0 +1,70 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Panel, Group as PanelGroup, useDefaultLayout } from 'react-resizable-panels'; +import { GraphQLResponseCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { PanelResizeHandle } from '@the-dev-tools/ui/resizable-panel'; +import { ReferenceContext } from '~/features/expression'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { GraphQLRequestPanel } from './request/panel'; +import { GraphQLTopBar } from './request/top-bar'; +import { GraphQLResponseInfo, GraphQLResponsePanel } from './response'; + +export const GraphQLPage = () => { + const { graphqlId } = routes.dashboard.workspace.graphql.route.useRouteContext(); + return ; +}; + +export const GraphQLDeltaPage = () => { + const { deltaGraphqlId, graphqlId } = routes.dashboard.workspace.graphql.delta.useRouteContext(); + return ; +}; + +interface PageProps { + deltaGraphqlId?: Uint8Array; + graphqlId: Uint8Array; +} + +const Page = ({ deltaGraphqlId, graphqlId }: PageProps) => { + const { workspaceId } = routes.dashboard.workspace.route.useLoaderData(); + + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { graphqlResponseId } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlId, deltaGraphqlId ?? graphqlId)) + .select((_) => pick(_.item, 'graphqlResponseId')) + .orderBy((_) => _.item.graphqlResponseId, 'desc') + .limit(1) + .findOne(), + [responseCollection, deltaGraphqlId, graphqlId], + ).data ?? 
{}; + + const endpointLayout = useDefaultLayout({ id: 'graphql-endpoint' }); + + return ( + + + + + + + + + + {graphqlResponseId && ( + <> + + + + + + + + + )} + + ); +}; diff --git a/packages/client/src/pages/graphql/request/assert.tsx b/packages/client/src/pages/graphql/request/assert.tsx new file mode 100644 index 000000000..5a5b641db --- /dev/null +++ b/packages/client/src/pages/graphql/request/assert.tsx @@ -0,0 +1,111 @@ +import { eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { useDragAndDrop } from 'react-aria-components'; +import { FiPlus } from 'react-icons/fi'; +import { + GraphQLAssertCollectionSchema, + GraphQLAssertDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { DropIndicatorHorizontal } from '@the-dev-tools/ui/reorder'; +import { Table, TableBody, TableCell, TableColumn, TableFooter, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { ColumnActionDeleteDelta, DeltaCheckbox, DeltaReference } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { getNextOrder, handleCollectionReorder, pick } from '~/shared/lib'; + +export interface GraphQLAssertTableProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLAssertTable = ({ deltaGraphqlId, graphqlId, isReadOnly = false }: GraphQLAssertTableProps) => { + const collection = useApiCollection(GraphQLAssertCollectionSchema); + + const items = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .orderBy((_) => _.item.order) + .select((_) => pick(_.item, 'graphqlAssertId', 'order')), + [collection, deltaGraphqlId, graphqlId], + ).data.map((_) => pick(_, 'graphqlAssertId')); + + const deltaColumnOptions = { + 
deltaKey: 'deltaGraphqlAssertId', + deltaParentKey: { graphqlId: deltaGraphqlId }, + deltaSchema: GraphQLAssertDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originKey: 'graphqlAssertId', + originSchema: GraphQLAssertCollectionSchema, + } as const; + + const { dragAndDropHooks } = useDragAndDrop({ + getItems: (keys) => [...keys].map((key) => ({ key: key.toString() })), + onReorder: handleCollectionReorder(collection), + renderDropIndicator: () => , + }); + + return ( + + + + Value + {!isReadOnly && } + + + + {({ graphqlAssertId }) => ( + + + + + + + + + + {!isReadOnly && ( + + + + )} + + )} + + + {!isReadOnly && ( + + + + )} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/request/header.tsx b/packages/client/src/pages/graphql/request/header.tsx new file mode 100644 index 000000000..dd2a7f64e --- /dev/null +++ b/packages/client/src/pages/graphql/request/header.tsx @@ -0,0 +1,139 @@ +import { eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Ulid } from 'id128'; +import { useDragAndDrop } from 'react-aria-components'; +import { FiPlus } from 'react-icons/fi'; +import { + GraphQLHeaderCollectionSchema, + GraphQLHeaderDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { DropIndicatorHorizontal } from '@the-dev-tools/ui/reorder'; +import { Table, TableBody, TableCell, TableColumn, TableFooter, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { ColumnActionDeleteDelta, DeltaCheckbox, DeltaReference, DeltaTextField } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { getNextOrder, handleCollectionReorder, pick } from '~/shared/lib'; + +export interface GraphQLHeaderTableProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + hideDescription?: boolean; + isReadOnly?: boolean; +} + +export const GraphQLHeaderTable = ({ + deltaGraphqlId, + graphqlId, + hideDescription = false, + isReadOnly = false, +}: GraphQLHeaderTableProps) => { + const collection = useApiCollection(GraphQLHeaderCollectionSchema); + + const items = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .orderBy((_) => _.item.order) + .select((_) => pick(_.item, 'graphqlHeaderId', 'order')), + [collection, deltaGraphqlId, graphqlId], + ).data.map((_) => pick(_, 'graphqlHeaderId')); + + const deltaColumnOptions = { + deltaKey: 'deltaGraphqlHeaderId', + deltaParentKey: { graphqlId: deltaGraphqlId }, + 
deltaSchema: GraphQLHeaderDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originKey: 'graphqlHeaderId', + originSchema: GraphQLHeaderCollectionSchema, + } as const; + + const { dragAndDropHooks } = useDragAndDrop({ + getItems: (keys) => [...keys].map((key) => ({ key: key.toString() })), + onReorder: handleCollectionReorder(collection), + renderDropIndicator: () => , + }); + + return ( + + + + Key + Value + {!hideDescription && Description} + {!isReadOnly && } + + + + {({ graphqlHeaderId }) => ( + + + + + + + + + + + + + + {!hideDescription && ( + + + + )} + + {!isReadOnly && ( + + + + )} + + )} + + + {!isReadOnly && ( + + + + )} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/request/index.tsx b/packages/client/src/pages/graphql/request/index.tsx new file mode 100644 index 000000000..0dcfe6553 --- /dev/null +++ b/packages/client/src/pages/graphql/request/index.tsx @@ -0,0 +1,2 @@ +export { GraphQLRequestPanel, type GraphQLRequestPanelProps } from './panel'; +export { GraphQLTopBar, type GraphQLTopBarProps } from './top-bar'; diff --git a/packages/client/src/pages/graphql/request/panel.tsx b/packages/client/src/pages/graphql/request/panel.tsx new file mode 100644 index 000000000..34f33eda7 --- /dev/null +++ b/packages/client/src/pages/graphql/request/panel.tsx @@ -0,0 +1,113 @@ +import { count, eq, or, useLiveQuery } from '@tanstack/react-db'; +import { Suspense } from 'react'; +import { Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { twMerge } from 'tailwind-merge'; +import { + GraphQLAssertCollectionSchema, + GraphQLHeaderCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { GraphQLAssertTable } from './assert'; +import { GraphQLHeaderTable } from './header'; +import { GraphQLQueryEditor } from './query-editor'; +import { GraphQLVariablesEditor } from './variables-editor'; + +export interface GraphQLRequestPanelProps { + className?: string; + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLRequestPanel = ({ + className, + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLRequestPanelProps) => { + const headerCollection = useApiCollection(GraphQLHeaderCollectionSchema); + + const { headerCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: headerCollection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .select((_) => ({ headerCount: 
count(_.item.graphqlId) })) + .findOne(), + [deltaGraphqlId, graphqlId, headerCollection], + ).data ?? {}; + + const assertCollection = useApiCollection(GraphQLAssertCollectionSchema); + + const { assertCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: assertCollection }) + .where((_) => or(eq(_.item.graphqlId, graphqlId), eq(_.item.graphqlId, deltaGraphqlId))) + .select((_) => ({ assertCount: count(_.item.graphqlId) })) + .findOne(), + [assertCollection, deltaGraphqlId, graphqlId], + ).data ?? {}; + + const tabClass = ({ isSelected }: { isSelected: boolean }) => + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-1.5 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ); + + return ( + + + + Query + + + + Variables + + + + Headers + {headerCount > 0 && ({headerCount})} + + + + Assertion + {assertCount > 0 && ({assertCount})} + + + + + + + } + > + + + + + + + + + + + + + + + + + + ); +}; diff --git a/packages/client/src/pages/graphql/request/query-editor.tsx b/packages/client/src/pages/graphql/request/query-editor.tsx new file mode 100644 index 000000000..7ee41cf67 --- /dev/null +++ b/packages/client/src/pages/graphql/request/query-editor.tsx @@ -0,0 +1,42 @@ +import CodeMirror from '@uiw/react-codemirror'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { useDeltaState } from '~/features/delta'; + +export interface GraphQLQueryEditorProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLQueryEditor = ({ deltaGraphqlId, graphqlId, isReadOnly = false }: GraphQLQueryEditorProps) => { + const { theme } = useTheme(); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: 
GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + valueKey: 'query', + } as const; + + const [value, setValue] = useDeltaState(deltaOptions); + + return ( + void setValue(_)} + placeholder='Enter your GraphQL query...' + readOnly={isReadOnly} + theme={theme} + value={value ?? ''} + /> + ); +}; diff --git a/packages/client/src/pages/graphql/request/top-bar.tsx b/packages/client/src/pages/graphql/request/top-bar.tsx new file mode 100644 index 000000000..19229b958 --- /dev/null +++ b/packages/client/src/pages/graphql/request/top-bar.tsx @@ -0,0 +1,131 @@ +import { Array, pipe } from 'effect'; +import { useState, useTransition } from 'react'; +import { Button as AriaButton, DialogTrigger, MenuTrigger } from 'react-aria-components'; +import { FiClock, FiMoreHorizontal } from 'react-icons/fi'; +import { GraphQLService } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { GraphQLCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Button } from '@the-dev-tools/ui/button'; +import { Menu, MenuItem, useContextMenuState } from '@the-dev-tools/ui/menu'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { TextInputField, useEditableTextState } from '@the-dev-tools/ui/text-field'; +import { ReferenceField } from '~/features/expression'; +import { request, useApiCollection } from '~/shared/api'; +import { routes } from '~/shared/routes'; +import { HistoryModal } from '../history'; + +export interface GraphQLTopBarProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; +} + +export const GraphQLTopBar = ({ deltaGraphqlId, graphqlId }: GraphQLTopBarProps) => { + const { transport } = routes.root.useRouteContext(); + + const collection = useApiCollection(GraphQLCollectionSchema); + + const item = collection.get(collection.utils.getKey({ graphqlId })); + + const { menuProps, menuTriggerProps, onContextMenu } = 
useContextMenuState(); + + const { edit, isEditing, textFieldProps } = useEditableTextState({ + onSuccess: (_) => { + if (_ === item?.name) return; + collection.utils.update({ graphqlId, name: _ }); + }, + value: item?.name ?? '', + }); + + const [isSending, startTransition] = useTransition(); + + const [urlState, setUrlState] = useState(); + + return ( + <> +
+
+ {isEditing ? ( + + ) : ( + void edit()} + > + {item?.name} + + )} +
+ + + + + + + + + + + + void edit()}>Rename + + collection.utils.delete({ graphqlId })} variant='danger'> + Delete + + + +
+ +
+ { + if (urlState !== undefined) { + collection.utils.update({ graphqlId, url: urlState }); + } + }} + onChange={(_) => void setUrlState(_)} + value={urlState ?? item?.url ?? ''} + /> + + +
+ + ); +}; diff --git a/packages/client/src/pages/graphql/request/url.tsx b/packages/client/src/pages/graphql/request/url.tsx new file mode 100644 index 000000000..45c4881ab --- /dev/null +++ b/packages/client/src/pages/graphql/request/url.tsx @@ -0,0 +1,39 @@ +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { DeltaResetButton, useDeltaState } from '~/features/delta'; +import { ReferenceField } from '~/features/expression'; + +export interface GraphQLUrlProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLUrl = ({ deltaGraphqlId, graphqlId, isReadOnly = false }: GraphQLUrlProps) => { + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + }; + + const [url, setUrl] = useDeltaState({ ...deltaOptions, valueKey: 'url' }); + + return ( +
+ void setUrl(_)} + readOnly={isReadOnly} + value={url ?? ''} + /> + +
+ ); +}; diff --git a/packages/client/src/pages/graphql/request/variables-editor.tsx b/packages/client/src/pages/graphql/request/variables-editor.tsx new file mode 100644 index 000000000..e0b8b8db5 --- /dev/null +++ b/packages/client/src/pages/graphql/request/variables-editor.tsx @@ -0,0 +1,51 @@ +import { json } from '@codemirror/lang-json'; +import CodeMirror from '@uiw/react-codemirror'; +import { useMemo } from 'react'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { useDeltaState } from '~/features/delta'; + +export interface GraphQLVariablesEditorProps { + deltaGraphqlId?: Uint8Array | undefined; + graphqlId: Uint8Array; + isReadOnly?: boolean; +} + +export const GraphQLVariablesEditor = ({ + deltaGraphqlId, + graphqlId, + isReadOnly = false, +}: GraphQLVariablesEditorProps) => { + const { theme } = useTheme(); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + valueKey: 'variables', + } as const; + + const [value, setValue] = useDeltaState(deltaOptions); + + const extensions = useMemo(() => [json()], []); + + return ( + void setValue(_)} + placeholder='{"key": "value"}' + readOnly={isReadOnly} + theme={theme} + value={value ?? 
''} + /> + ); +}; diff --git a/packages/client/src/pages/graphql/response/assert.tsx b/packages/client/src/pages/graphql/response/assert.tsx new file mode 100644 index 000000000..2b99ea19d --- /dev/null +++ b/packages/client/src/pages/graphql/response/assert.tsx @@ -0,0 +1,42 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { Fragment } from 'react/jsx-runtime'; +import { twJoin } from 'tailwind-merge'; +import { GraphQLResponseAssertCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLAssertTableProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLAssertTable = ({ graphqlResponseId }: GraphQLAssertTableProps) => { + const collection = useApiCollection(GraphQLResponseAssertCollectionSchema); + + const { data: items } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'graphqlResponseAssertId', 'value', 'success')), + [collection, graphqlResponseId], + ); + + return ( +
+ {items.map((_) => ( + +
+ {_.success ? 'Pass' : 'Fail'} +
+ + {_.value} +
+ ))} +
+ ); +}; diff --git a/packages/client/src/pages/graphql/response/body.tsx b/packages/client/src/pages/graphql/response/body.tsx new file mode 100644 index 000000000..736f5de4c --- /dev/null +++ b/packages/client/src/pages/graphql/response/body.tsx @@ -0,0 +1,45 @@ +import { create } from '@bufbuild/protobuf'; +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { useQuery } from '@tanstack/react-query'; +import CodeMirror from '@uiw/react-codemirror'; +import { GraphQLResponseSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { GraphQLResponseCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useTheme } from '@the-dev-tools/ui/theme'; +import { prettierFormatQueryOptions, useCodeMirrorLanguageExtensions } from '~/features/expression'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLResponseBodyProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseBody = ({ graphqlResponseId }: GraphQLResponseBodyProps) => { + const { theme } = useTheme(); + const collection = useApiCollection(GraphQLResponseCollectionSchema); + + const { body } = + useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'body')) + .findOne(), + [collection, graphqlResponseId], + ).data ?? 
create(GraphQLResponseSchema); + + const { data: prettierBody } = useQuery(prettierFormatQueryOptions({ language: 'json', text: body })); + const extensions = useCodeMirrorLanguageExtensions('json'); + + return ( + + ); +}; diff --git a/packages/client/src/pages/graphql/response/header.tsx b/packages/client/src/pages/graphql/response/header.tsx new file mode 100644 index 000000000..46dc04424 --- /dev/null +++ b/packages/client/src/pages/graphql/response/header.tsx @@ -0,0 +1,40 @@ +import { eq, useLiveQuery } from '@tanstack/react-db'; +import { GraphQLResponseHeaderCollectionSchema } from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Table, TableBody, TableCell, TableColumn, TableHeader, TableRow } from '@the-dev-tools/ui/table'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; + +export interface GraphQLResponseHeaderTableProps { + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseHeaderTable = ({ graphqlResponseId }: GraphQLResponseHeaderTableProps) => { + const collection = useApiCollection(GraphQLResponseHeaderCollectionSchema); + + const { data: items } = useLiveQuery( + (_) => + _.from({ item: collection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'key', 'value')), + [collection, graphqlResponseId], + ); + + return ( + + + Key + Value + + + + {(_) => ( + + {_.key} + {_.value} + + )} + +
+ ); +}; diff --git a/packages/client/src/pages/graphql/response/index.tsx b/packages/client/src/pages/graphql/response/index.tsx new file mode 100644 index 000000000..84e19d0d0 --- /dev/null +++ b/packages/client/src/pages/graphql/response/index.tsx @@ -0,0 +1,187 @@ +import { create } from '@bufbuild/protobuf'; +import { count, eq, useLiveQuery } from '@tanstack/react-db'; +import { Duration, pipe } from 'effect'; +import { ReactNode, Suspense } from 'react'; +import { Tab, TabList, TabPanel, Tabs } from 'react-aria-components'; +import { twJoin, twMerge } from 'tailwind-merge'; +import { GraphQLResponseSchema } from '@the-dev-tools/spec/buf/api/graph_q_l/v1/graph_q_l_pb'; +import { + GraphQLResponseAssertCollectionSchema, + GraphQLResponseCollectionSchema, + GraphQLResponseHeaderCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { Separator } from '@the-dev-tools/ui/separator'; +import { Spinner } from '@the-dev-tools/ui/spinner'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { formatSize } from '@the-dev-tools/ui/utils'; +import { useApiCollection } from '~/shared/api'; +import { pick } from '~/shared/lib'; +import { GraphQLAssertTable } from './assert'; +import { GraphQLResponseBody } from './body'; +import { GraphQLResponseHeaderTable } from './header'; + +export interface GraphQLResponseInfoProps { + className?: string; + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponseInfo = ({ className, graphqlResponseId }: GraphQLResponseInfoProps) => { + const responseCollection = useApiCollection(GraphQLResponseCollectionSchema); + + const { duration, size, status } = + useLiveQuery( + (_) => + _.from({ item: responseCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => pick(_.item, 'duration', 'size', 'status')) + .findOne(), + [responseCollection, graphqlResponseId], + ).data ?? create(GraphQLResponseSchema); + + return ( +
+
+ Status: + {status} +
+ + + +
+ Time: + {pipe(duration, Duration.millis, Duration.format)} +
+ + + +
+ Size: + {formatSize(size)} +
+
+ ); +}; + +export interface GraphQLResponsePanelProps { + children?: ReactNode; + className?: string; + fullWidth?: boolean; + graphqlResponseId: Uint8Array; +} + +export const GraphQLResponsePanel = ({ + children, + className, + fullWidth = false, + graphqlResponseId, +}: GraphQLResponsePanelProps) => { + const headerCollection = useApiCollection(GraphQLResponseHeaderCollectionSchema); + + const { headerCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: headerCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => ({ headerCount: count(_.item.graphqlResponseHeaderId) })) + .findOne(), + [headerCollection, graphqlResponseId], + ).data ?? {}; + + const assertCollection = useApiCollection(GraphQLResponseAssertCollectionSchema); + + const { assertCount = 0 } = + useLiveQuery( + (_) => + _.from({ item: assertCollection }) + .where((_) => eq(_.item.graphqlResponseId, graphqlResponseId)) + .select((_) => ({ assertCount: count(_.item.graphqlResponseAssertId) })) + .findOne(), + [assertCollection, graphqlResponseId], + ).data ?? {}; + + return ( + +
+ + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='body' + > + Body + + + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='headers' + > + Headers + {headerCount > 0 && ({headerCount})} + + + + twMerge( + tw` + -mb-px cursor-pointer border-b-2 border-transparent py-2 text-md leading-5 font-medium tracking-tight + text-on-neutral-low transition-colors + `, + isSelected && tw`border-b-accent text-on-neutral`, + ) + } + id='assertions' + > + Assertion + {assertCount > 0 && ({assertCount})} + + + +
+ + {children} +
+ +
+ + +
+ } + > + + + + + + + + + + + + +
+
+ ); +}; diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx new file mode 100644 index 000000000..59968f9a5 --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/delta.$deltaGraphqlIdCan.tsx @@ -0,0 +1,24 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { Ulid } from 'id128'; +import { openTab } from '~/widgets/tabs'; +import { GraphQLDeltaPage } from '../../../page'; +import { GraphQLTab, graphqlTabId } from '../../../tab'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan', +)({ + component: GraphQLDeltaPage, + context: ({ params: { deltaGraphqlIdCan } }) => { + const deltaGraphqlId = Ulid.fromCanonical(deltaGraphqlIdCan).bytes; + return { deltaGraphqlId }; + }, + onEnter: async (match) => { + const { deltaGraphqlId, graphqlId } = match.context; + + await openTab({ + id: graphqlTabId({ deltaGraphqlId, graphqlId }), + match, + node: , + }); + }, +}); diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx new file mode 100644 index 000000000..1053d3aca --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/index.tsx @@ -0,0 +1,19 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { openTab } from '~/widgets/tabs'; +import { GraphQLPage } from '../../../page'; +import { GraphQLTab, graphqlTabId } from '../../../tab'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/', +)({ + component: GraphQLPage, + onEnter: async (match) => { + const { graphqlId } = match.context; + + await openTab({ + id: graphqlTabId({ graphqlId }), + match, + node: , + }); 
+ }, +}); diff --git a/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx new file mode 100644 index 000000000..e61f74b47 --- /dev/null +++ b/packages/client/src/pages/graphql/routes/graphql/$graphqlIdCan/route.tsx @@ -0,0 +1,11 @@ +import { createFileRoute } from '@tanstack/react-router'; +import { Ulid } from 'id128'; + +export const Route = createFileRoute( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan', +)({ + context: ({ params: { graphqlIdCan } }) => { + const graphqlId = Ulid.fromCanonical(graphqlIdCan).bytes; + return { graphqlId }; + }, +}); diff --git a/packages/client/src/pages/graphql/tab.tsx b/packages/client/src/pages/graphql/tab.tsx new file mode 100644 index 000000000..b25dd984a --- /dev/null +++ b/packages/client/src/pages/graphql/tab.tsx @@ -0,0 +1,65 @@ +import { useLiveQuery } from '@tanstack/react-db'; +import { useEffect } from 'react'; +import { + GraphQLCollectionSchema, + GraphQLDeltaCollectionSchema, +} from '@the-dev-tools/spec/tanstack-db/v1/api/graph_q_l'; +import { tw } from '@the-dev-tools/ui/tailwind-literal'; +import { useDeltaState } from '~/features/delta'; +import { useApiCollection } from '~/shared/api'; +import { eqStruct } from '~/shared/lib'; +import { routes } from '~/shared/routes'; +import { useCloseTab } from '~/widgets/tabs'; + +export interface GraphQLTabProps { + deltaGraphqlId?: Uint8Array; + graphqlId: Uint8Array; +} + +export const graphqlTabId = ({ deltaGraphqlId, graphqlId }: GraphQLTabProps) => + JSON.stringify({ deltaGraphqlId, graphqlId, route: routes.dashboard.workspace.graphql.route.id }); + +export const GraphQLTab = ({ deltaGraphqlId, graphqlId }: GraphQLTabProps) => { + const closeTab = useCloseTab(); + + const graphqlCollection = useApiCollection(GraphQLCollectionSchema); + + const graphqlExists = + useLiveQuery( + (_) => _.from({ item: graphqlCollection 
}).where(eqStruct({ graphqlId })).findOne(), + [graphqlCollection, graphqlId], + ).data !== undefined; + + useEffect(() => { + if (!graphqlExists) void closeTab(graphqlTabId({ graphqlId })); + }, [graphqlExists, graphqlId, closeTab]); + + const deltaCollection = useApiCollection(GraphQLDeltaCollectionSchema); + + const deltaExists = + useLiveQuery( + (_) => _.from({ item: deltaCollection }).where(eqStruct({ deltaGraphqlId })).findOne(), + [deltaCollection, deltaGraphqlId], + ).data !== undefined; + + useEffect(() => { + if (deltaGraphqlId && !deltaExists) void closeTab(graphqlTabId({ deltaGraphqlId, graphqlId })); + }, [deltaExists, deltaGraphqlId, graphqlId, closeTab]); + + const deltaOptions = { + deltaId: deltaGraphqlId, + deltaSchema: GraphQLDeltaCollectionSchema, + isDelta: deltaGraphqlId !== undefined, + originId: graphqlId, + originSchema: GraphQLCollectionSchema, + }; + + const [name] = useDeltaState({ ...deltaOptions, valueKey: 'name' }); + + return ( + <> + GQL + {name} + + ); +}; diff --git a/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts b/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts new file mode 100644 index 000000000..5de4c49bd --- /dev/null +++ b/packages/client/src/pages/workspace/routes/workspace/$workspaceIdCan/(graphql)/__virtual.ts @@ -0,0 +1,3 @@ +import { resolveRoutesFrom } from '../../../../../graphql/@x/workspace'; + +export default resolveRoutesFrom(import.meta.dirname); diff --git a/packages/client/src/shared/routes.tsx b/packages/client/src/shared/routes.tsx index 4eac1ca65..b7c3990f0 100644 --- a/packages/client/src/shared/routes.tsx +++ b/packages/client/src/shared/routes.tsx @@ -20,6 +20,13 @@ export const routes = { index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/'), history: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(flow)/flow/$flowIdCan/history'), }, + graphql: { + route: 
getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan'), + index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/'), + delta: getRouteApi( + '/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/delta/$deltaGraphqlIdCan', + ), + }, http: { route: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan'), index: getRouteApi('/(dashboard)/(workspace)/workspace/$workspaceIdCan/(http)/http/$httpIdCan/'), diff --git a/packages/db/pkg/sqlc/gen/db.go b/packages/db/pkg/sqlc/gen/db.go index d50582dd1..d2a8aeb06 100644 --- a/packages/db/pkg/sqlc/gen/db.go +++ b/packages/db/pkg/sqlc/gen/db.go @@ -132,6 +132,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.cleanupOrphanedFlowNodeForEachStmt, err = db.PrepareContext(ctx, cleanupOrphanedFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeForEach: %w", err) } + if q.cleanupOrphanedFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, cleanupOrphanedFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeGraphQL: %w", err) + } if q.cleanupOrphanedFlowNodeHttpStmt, err = db.PrepareContext(ctx, cleanupOrphanedFlowNodeHttp); err != nil { return nil, fmt.Errorf("error preparing query CleanupOrphanedFlowNodeHttp: %w", err) } @@ -183,6 +186,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.createFlowNodeForEachStmt, err = db.PrepareContext(ctx, createFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query CreateFlowNodeForEach: %w", err) } + if q.createFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, createFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CreateFlowNodeGraphQL: %w", err) + } if q.createFlowNodeHTTPStmt, err = db.PrepareContext(ctx, createFlowNodeHTTP); err != nil { return nil, 
fmt.Errorf("error preparing query CreateFlowNodeHTTP: %w", err) } @@ -210,6 +216,30 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.createFlowsBulkStmt, err = db.PrepareContext(ctx, createFlowsBulk); err != nil { return nil, fmt.Errorf("error preparing query CreateFlowsBulk: %w", err) } + if q.createGraphQLStmt, err = db.PrepareContext(ctx, createGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQL: %w", err) + } + if q.createGraphQLAssertStmt, err = db.PrepareContext(ctx, createGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLAssert: %w", err) + } + if q.createGraphQLHeaderStmt, err = db.PrepareContext(ctx, createGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLHeader: %w", err) + } + if q.createGraphQLResponseStmt, err = db.PrepareContext(ctx, createGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponse: %w", err) + } + if q.createGraphQLResponseAssertStmt, err = db.PrepareContext(ctx, createGraphQLResponseAssert); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseAssert: %w", err) + } + if q.createGraphQLResponseHeaderStmt, err = db.PrepareContext(ctx, createGraphQLResponseHeader); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseHeader: %w", err) + } + if q.createGraphQLResponseHeaderBulkStmt, err = db.PrepareContext(ctx, createGraphQLResponseHeaderBulk); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLResponseHeaderBulk: %w", err) + } + if q.createGraphQLVersionStmt, err = db.PrepareContext(ctx, createGraphQLVersion); err != nil { + return nil, fmt.Errorf("error preparing query CreateGraphQLVersion: %w", err) + } if q.createHTTPStmt, err = db.PrepareContext(ctx, createHTTP); err != nil { return nil, fmt.Errorf("error preparing query CreateHTTP: %w", err) } @@ -324,6 +354,9 @@ func 
Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.deleteFlowNodeForEachStmt, err = db.PrepareContext(ctx, deleteFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowNodeForEach: %w", err) } + if q.deleteFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, deleteFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query DeleteFlowNodeGraphQL: %w", err) + } if q.deleteFlowNodeHTTPStmt, err = db.PrepareContext(ctx, deleteFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowNodeHTTP: %w", err) } @@ -339,6 +372,21 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.deleteFlowVariableStmt, err = db.PrepareContext(ctx, deleteFlowVariable); err != nil { return nil, fmt.Errorf("error preparing query DeleteFlowVariable: %w", err) } + if q.deleteGraphQLStmt, err = db.PrepareContext(ctx, deleteGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQL: %w", err) + } + if q.deleteGraphQLAssertStmt, err = db.PrepareContext(ctx, deleteGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLAssert: %w", err) + } + if q.deleteGraphQLHeaderStmt, err = db.PrepareContext(ctx, deleteGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLHeader: %w", err) + } + if q.deleteGraphQLResponseStmt, err = db.PrepareContext(ctx, deleteGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLResponse: %w", err) + } + if q.deleteGraphQLResponseHeaderStmt, err = db.PrepareContext(ctx, deleteGraphQLResponseHeader); err != nil { + return nil, fmt.Errorf("error preparing query DeleteGraphQLResponseHeader: %w", err) + } if q.deleteHTTPStmt, err = db.PrepareContext(ctx, deleteHTTP); err != nil { return nil, fmt.Errorf("error preparing query DeleteHTTP: %w", err) } @@ -501,6 +549,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if 
q.getFlowNodeForEachStmt, err = db.PrepareContext(ctx, getFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query GetFlowNodeForEach: %w", err) } + if q.getFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, getFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query GetFlowNodeGraphQL: %w", err) + } if q.getFlowNodeHTTPStmt, err = db.PrepareContext(ctx, getFlowNodeHTTP); err != nil { return nil, fmt.Errorf("error preparing query GetFlowNodeHTTP: %w", err) } @@ -543,6 +594,72 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.getFlowsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getFlowsByWorkspaceID); err != nil { return nil, fmt.Errorf("error preparing query GetFlowsByWorkspaceID: %w", err) } + if q.getGraphQLStmt, err = db.PrepareContext(ctx, getGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQL: %w", err) + } + if q.getGraphQLAssertStmt, err = db.PrepareContext(ctx, getGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssert: %w", err) + } + if q.getGraphQLAssertDeltasByParentIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertDeltasByParentID: %w", err) + } + if q.getGraphQLAssertDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertDeltasByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLAssertsByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLAssertsByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertsByGraphQLID: %w", err) + } + if q.getGraphQLAssertsByIDsStmt, err = db.PrepareContext(ctx, getGraphQLAssertsByIDs); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLAssertsByIDs: %w", err) + } + if q.getGraphQLDeltasByParentIDStmt, err = db.PrepareContext(ctx, 
getGraphQLDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLDeltasByParentID: %w", err) + } + if q.getGraphQLDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLDeltasByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLHeaderDeltasByParentIDStmt, err = db.PrepareContext(ctx, getGraphQLHeaderDeltasByParentID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaderDeltasByParentID: %w", err) + } + if q.getGraphQLHeaderDeltasByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLHeaderDeltasByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaderDeltasByWorkspaceID: %w", err) + } + if q.getGraphQLHeadersStmt, err = db.PrepareContext(ctx, getGraphQLHeaders); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeaders: %w", err) + } + if q.getGraphQLHeadersByIDsStmt, err = db.PrepareContext(ctx, getGraphQLHeadersByIDs); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLHeadersByIDs: %w", err) + } + if q.getGraphQLResponseStmt, err = db.PrepareContext(ctx, getGraphQLResponse); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponse: %w", err) + } + if q.getGraphQLResponseAssertsByResponseIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseAssertsByResponseID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseAssertsByResponseID: %w", err) + } + if q.getGraphQLResponseAssertsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseAssertsByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseAssertsByWorkspaceID: %w", err) + } + if q.getGraphQLResponseHeadersByResponseIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseHeadersByResponseID); err != nil { + return nil, fmt.Errorf("error preparing query 
GetGraphQLResponseHeadersByResponseID: %w", err) + } + if q.getGraphQLResponseHeadersByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponseHeadersByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponseHeadersByWorkspaceID: %w", err) + } + if q.getGraphQLResponsesByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLResponsesByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponsesByGraphQLID: %w", err) + } + if q.getGraphQLResponsesByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLResponsesByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLResponsesByWorkspaceID: %w", err) + } + if q.getGraphQLVersionsByGraphQLIDStmt, err = db.PrepareContext(ctx, getGraphQLVersionsByGraphQLID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLVersionsByGraphQLID: %w", err) + } + if q.getGraphQLWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLWorkspaceID: %w", err) + } + if q.getGraphQLsByWorkspaceIDStmt, err = db.PrepareContext(ctx, getGraphQLsByWorkspaceID); err != nil { + return nil, fmt.Errorf("error preparing query GetGraphQLsByWorkspaceID: %w", err) + } if q.getHTTPStmt, err = db.PrepareContext(ctx, getHTTP); err != nil { return nil, fmt.Errorf("error preparing query GetHTTP: %w", err) } @@ -840,6 +957,9 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.updateFlowNodeForEachStmt, err = db.PrepareContext(ctx, updateFlowNodeForEach); err != nil { return nil, fmt.Errorf("error preparing query UpdateFlowNodeForEach: %w", err) } + if q.updateFlowNodeGraphQLStmt, err = db.PrepareContext(ctx, updateFlowNodeGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query UpdateFlowNodeGraphQL: %w", err) + } if q.updateFlowNodeHTTPStmt, err = db.PrepareContext(ctx, updateFlowNodeHTTP); err != nil { return nil, 
fmt.Errorf("error preparing query UpdateFlowNodeHTTP: %w", err) } @@ -861,6 +981,21 @@ func Prepare(ctx context.Context, db DBTX) (*Queries, error) { if q.updateFlowVariableOrderStmt, err = db.PrepareContext(ctx, updateFlowVariableOrder); err != nil { return nil, fmt.Errorf("error preparing query UpdateFlowVariableOrder: %w", err) } + if q.updateGraphQLStmt, err = db.PrepareContext(ctx, updateGraphQL); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQL: %w", err) + } + if q.updateGraphQLAssertStmt, err = db.PrepareContext(ctx, updateGraphQLAssert); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLAssert: %w", err) + } + if q.updateGraphQLAssertDeltaStmt, err = db.PrepareContext(ctx, updateGraphQLAssertDelta); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLAssertDelta: %w", err) + } + if q.updateGraphQLDeltaStmt, err = db.PrepareContext(ctx, updateGraphQLDelta); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLDelta: %w", err) + } + if q.updateGraphQLHeaderStmt, err = db.PrepareContext(ctx, updateGraphQLHeader); err != nil { + return nil, fmt.Errorf("error preparing query UpdateGraphQLHeader: %w", err) + } if q.updateHTTPStmt, err = db.PrepareContext(ctx, updateHTTP); err != nil { return nil, fmt.Errorf("error preparing query UpdateHTTP: %w", err) } @@ -1136,6 +1271,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing cleanupOrphanedFlowNodeForEachStmt: %w", cerr) } } + if q.cleanupOrphanedFlowNodeGraphQLStmt != nil { + if cerr := q.cleanupOrphanedFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing cleanupOrphanedFlowNodeGraphQLStmt: %w", cerr) + } + } if q.cleanupOrphanedFlowNodeHttpStmt != nil { if cerr := q.cleanupOrphanedFlowNodeHttpStmt.Close(); cerr != nil { err = fmt.Errorf("error closing cleanupOrphanedFlowNodeHttpStmt: %w", cerr) @@ -1221,6 +1361,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error 
closing createFlowNodeForEachStmt: %w", cerr) } } + if q.createFlowNodeGraphQLStmt != nil { + if cerr := q.createFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createFlowNodeGraphQLStmt: %w", cerr) + } + } if q.createFlowNodeHTTPStmt != nil { if cerr := q.createFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing createFlowNodeHTTPStmt: %w", cerr) @@ -1266,6 +1411,46 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing createFlowsBulkStmt: %w", cerr) } } + if q.createGraphQLStmt != nil { + if cerr := q.createGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLStmt: %w", cerr) + } + } + if q.createGraphQLAssertStmt != nil { + if cerr := q.createGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLAssertStmt: %w", cerr) + } + } + if q.createGraphQLHeaderStmt != nil { + if cerr := q.createGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLHeaderStmt: %w", cerr) + } + } + if q.createGraphQLResponseStmt != nil { + if cerr := q.createGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseStmt: %w", cerr) + } + } + if q.createGraphQLResponseAssertStmt != nil { + if cerr := q.createGraphQLResponseAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseAssertStmt: %w", cerr) + } + } + if q.createGraphQLResponseHeaderStmt != nil { + if cerr := q.createGraphQLResponseHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseHeaderStmt: %w", cerr) + } + } + if q.createGraphQLResponseHeaderBulkStmt != nil { + if cerr := q.createGraphQLResponseHeaderBulkStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing createGraphQLResponseHeaderBulkStmt: %w", cerr) + } + } + if q.createGraphQLVersionStmt != nil { + if cerr := q.createGraphQLVersionStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing 
createGraphQLVersionStmt: %w", cerr) + } + } if q.createHTTPStmt != nil { if cerr := q.createHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing createHTTPStmt: %w", cerr) @@ -1456,6 +1641,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing deleteFlowNodeForEachStmt: %w", cerr) } } + if q.deleteFlowNodeGraphQLStmt != nil { + if cerr := q.deleteFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteFlowNodeGraphQLStmt: %w", cerr) + } + } if q.deleteFlowNodeHTTPStmt != nil { if cerr := q.deleteFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing deleteFlowNodeHTTPStmt: %w", cerr) @@ -1481,6 +1671,31 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing deleteFlowVariableStmt: %w", cerr) } } + if q.deleteGraphQLStmt != nil { + if cerr := q.deleteGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLStmt: %w", cerr) + } + } + if q.deleteGraphQLAssertStmt != nil { + if cerr := q.deleteGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLAssertStmt: %w", cerr) + } + } + if q.deleteGraphQLHeaderStmt != nil { + if cerr := q.deleteGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLHeaderStmt: %w", cerr) + } + } + if q.deleteGraphQLResponseStmt != nil { + if cerr := q.deleteGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLResponseStmt: %w", cerr) + } + } + if q.deleteGraphQLResponseHeaderStmt != nil { + if cerr := q.deleteGraphQLResponseHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing deleteGraphQLResponseHeaderStmt: %w", cerr) + } + } if q.deleteHTTPStmt != nil { if cerr := q.deleteHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing deleteHTTPStmt: %w", cerr) @@ -1751,6 +1966,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getFlowNodeForEachStmt: %w", cerr) } } + if 
q.getFlowNodeGraphQLStmt != nil { + if cerr := q.getFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getFlowNodeGraphQLStmt: %w", cerr) + } + } if q.getFlowNodeHTTPStmt != nil { if cerr := q.getFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getFlowNodeHTTPStmt: %w", cerr) @@ -1821,6 +2041,116 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing getFlowsByWorkspaceIDStmt: %w", cerr) } } + if q.getGraphQLStmt != nil { + if cerr := q.getGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLStmt: %w", cerr) + } + } + if q.getGraphQLAssertStmt != nil { + if cerr := q.getGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertStmt: %w", cerr) + } + } + if q.getGraphQLAssertDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLAssertDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLAssertDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertsByGraphQLIDStmt != nil { + if cerr := q.getGraphQLAssertsByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertsByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLAssertsByIDsStmt != nil { + if cerr := q.getGraphQLAssertsByIDsStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLAssertsByIDsStmt: %w", cerr) + } + } + if q.getGraphQLDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing 
getGraphQLDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLHeaderDeltasByParentIDStmt != nil { + if cerr := q.getGraphQLHeaderDeltasByParentIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeaderDeltasByParentIDStmt: %w", cerr) + } + } + if q.getGraphQLHeaderDeltasByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLHeaderDeltasByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeaderDeltasByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLHeadersStmt != nil { + if cerr := q.getGraphQLHeadersStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeadersStmt: %w", cerr) + } + } + if q.getGraphQLHeadersByIDsStmt != nil { + if cerr := q.getGraphQLHeadersByIDsStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLHeadersByIDsStmt: %w", cerr) + } + } + if q.getGraphQLResponseStmt != nil { + if cerr := q.getGraphQLResponseStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseStmt: %w", cerr) + } + } + if q.getGraphQLResponseAssertsByResponseIDStmt != nil { + if cerr := q.getGraphQLResponseAssertsByResponseIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseAssertsByResponseIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseAssertsByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponseAssertsByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseAssertsByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseHeadersByResponseIDStmt != nil { + if cerr := q.getGraphQLResponseHeadersByResponseIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseHeadersByResponseIDStmt: %w", cerr) + } + } + if q.getGraphQLResponseHeadersByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponseHeadersByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponseHeadersByWorkspaceIDStmt: %w", cerr) + } + } + if 
q.getGraphQLResponsesByGraphQLIDStmt != nil { + if cerr := q.getGraphQLResponsesByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponsesByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLResponsesByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLResponsesByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLResponsesByWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLVersionsByGraphQLIDStmt != nil { + if cerr := q.getGraphQLVersionsByGraphQLIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLVersionsByGraphQLIDStmt: %w", cerr) + } + } + if q.getGraphQLWorkspaceIDStmt != nil { + if cerr := q.getGraphQLWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLWorkspaceIDStmt: %w", cerr) + } + } + if q.getGraphQLsByWorkspaceIDStmt != nil { + if cerr := q.getGraphQLsByWorkspaceIDStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing getGraphQLsByWorkspaceIDStmt: %w", cerr) + } + } if q.getHTTPStmt != nil { if cerr := q.getHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing getHTTPStmt: %w", cerr) @@ -2316,6 +2646,11 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing updateFlowNodeForEachStmt: %w", cerr) } } + if q.updateFlowNodeGraphQLStmt != nil { + if cerr := q.updateFlowNodeGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateFlowNodeGraphQLStmt: %w", cerr) + } + } if q.updateFlowNodeHTTPStmt != nil { if cerr := q.updateFlowNodeHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing updateFlowNodeHTTPStmt: %w", cerr) @@ -2351,6 +2686,31 @@ func (q *Queries) Close() error { err = fmt.Errorf("error closing updateFlowVariableOrderStmt: %w", cerr) } } + if q.updateGraphQLStmt != nil { + if cerr := q.updateGraphQLStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLStmt: %w", cerr) + } + } + if q.updateGraphQLAssertStmt != nil { + if cerr := 
q.updateGraphQLAssertStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLAssertStmt: %w", cerr) + } + } + if q.updateGraphQLAssertDeltaStmt != nil { + if cerr := q.updateGraphQLAssertDeltaStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLAssertDeltaStmt: %w", cerr) + } + } + if q.updateGraphQLDeltaStmt != nil { + if cerr := q.updateGraphQLDeltaStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLDeltaStmt: %w", cerr) + } + } + if q.updateGraphQLHeaderStmt != nil { + if cerr := q.updateGraphQLHeaderStmt.Close(); cerr != nil { + err = fmt.Errorf("error closing updateGraphQLHeaderStmt: %w", cerr) + } + } if q.updateHTTPStmt != nil { if cerr := q.updateHTTPStmt.Close(); cerr != nil { err = fmt.Errorf("error closing updateHTTPStmt: %w", cerr) @@ -2576,6 +2936,7 @@ type Queries struct { cleanupOrphanedFlowNodeConditionStmt *sql.Stmt cleanupOrphanedFlowNodeForStmt *sql.Stmt cleanupOrphanedFlowNodeForEachStmt *sql.Stmt + cleanupOrphanedFlowNodeGraphQLStmt *sql.Stmt cleanupOrphanedFlowNodeHttpStmt *sql.Stmt cleanupOrphanedFlowNodeJsStmt *sql.Stmt cleanupOrphanedNodeExecutionsStmt *sql.Stmt @@ -2593,6 +2954,7 @@ type Queries struct { createFlowNodeConditionStmt *sql.Stmt createFlowNodeForStmt *sql.Stmt createFlowNodeForEachStmt *sql.Stmt + createFlowNodeGraphQLStmt *sql.Stmt createFlowNodeHTTPStmt *sql.Stmt createFlowNodeJsStmt *sql.Stmt createFlowNodeMemoryStmt *sql.Stmt @@ -2602,6 +2964,14 @@ type Queries struct { createFlowVariableStmt *sql.Stmt createFlowVariableBulkStmt *sql.Stmt createFlowsBulkStmt *sql.Stmt + createGraphQLStmt *sql.Stmt + createGraphQLAssertStmt *sql.Stmt + createGraphQLHeaderStmt *sql.Stmt + createGraphQLResponseStmt *sql.Stmt + createGraphQLResponseAssertStmt *sql.Stmt + createGraphQLResponseHeaderStmt *sql.Stmt + createGraphQLResponseHeaderBulkStmt *sql.Stmt + createGraphQLVersionStmt *sql.Stmt createHTTPStmt *sql.Stmt createHTTPAssertStmt *sql.Stmt createHTTPAssertBulkStmt 
*sql.Stmt @@ -2640,11 +3010,17 @@ type Queries struct { deleteFlowNodeConditionStmt *sql.Stmt deleteFlowNodeForStmt *sql.Stmt deleteFlowNodeForEachStmt *sql.Stmt + deleteFlowNodeGraphQLStmt *sql.Stmt deleteFlowNodeHTTPStmt *sql.Stmt deleteFlowNodeJsStmt *sql.Stmt deleteFlowNodeMemoryStmt *sql.Stmt deleteFlowTagStmt *sql.Stmt deleteFlowVariableStmt *sql.Stmt + deleteGraphQLStmt *sql.Stmt + deleteGraphQLAssertStmt *sql.Stmt + deleteGraphQLHeaderStmt *sql.Stmt + deleteGraphQLResponseStmt *sql.Stmt + deleteGraphQLResponseHeaderStmt *sql.Stmt deleteHTTPStmt *sql.Stmt deleteHTTPAssertStmt *sql.Stmt deleteHTTPBodyFormStmt *sql.Stmt @@ -2699,6 +3075,7 @@ type Queries struct { getFlowNodeConditionStmt *sql.Stmt getFlowNodeForStmt *sql.Stmt getFlowNodeForEachStmt *sql.Stmt + getFlowNodeGraphQLStmt *sql.Stmt getFlowNodeHTTPStmt *sql.Stmt getFlowNodeJsStmt *sql.Stmt getFlowNodeMemoryStmt *sql.Stmt @@ -2713,6 +3090,28 @@ type Queries struct { getFlowVariablesByFlowIDsStmt *sql.Stmt getFlowsByVersionParentIDStmt *sql.Stmt getFlowsByWorkspaceIDStmt *sql.Stmt + getGraphQLStmt *sql.Stmt + getGraphQLAssertStmt *sql.Stmt + getGraphQLAssertDeltasByParentIDStmt *sql.Stmt + getGraphQLAssertDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLAssertsByGraphQLIDStmt *sql.Stmt + getGraphQLAssertsByIDsStmt *sql.Stmt + getGraphQLDeltasByParentIDStmt *sql.Stmt + getGraphQLDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLHeaderDeltasByParentIDStmt *sql.Stmt + getGraphQLHeaderDeltasByWorkspaceIDStmt *sql.Stmt + getGraphQLHeadersStmt *sql.Stmt + getGraphQLHeadersByIDsStmt *sql.Stmt + getGraphQLResponseStmt *sql.Stmt + getGraphQLResponseAssertsByResponseIDStmt *sql.Stmt + getGraphQLResponseAssertsByWorkspaceIDStmt *sql.Stmt + getGraphQLResponseHeadersByResponseIDStmt *sql.Stmt + getGraphQLResponseHeadersByWorkspaceIDStmt *sql.Stmt + getGraphQLResponsesByGraphQLIDStmt *sql.Stmt + getGraphQLResponsesByWorkspaceIDStmt *sql.Stmt + getGraphQLVersionsByGraphQLIDStmt *sql.Stmt + getGraphQLWorkspaceIDStmt 
*sql.Stmt + getGraphQLsByWorkspaceIDStmt *sql.Stmt getHTTPStmt *sql.Stmt getHTTPAssertStmt *sql.Stmt getHTTPAssertsByHttpIDStmt *sql.Stmt @@ -2812,6 +3211,7 @@ type Queries struct { updateFlowNodeConditionStmt *sql.Stmt updateFlowNodeForStmt *sql.Stmt updateFlowNodeForEachStmt *sql.Stmt + updateFlowNodeGraphQLStmt *sql.Stmt updateFlowNodeHTTPStmt *sql.Stmt updateFlowNodeIDMappingStmt *sql.Stmt updateFlowNodeJsStmt *sql.Stmt @@ -2819,6 +3219,11 @@ type Queries struct { updateFlowNodeStateStmt *sql.Stmt updateFlowVariableStmt *sql.Stmt updateFlowVariableOrderStmt *sql.Stmt + updateGraphQLStmt *sql.Stmt + updateGraphQLAssertStmt *sql.Stmt + updateGraphQLAssertDeltaStmt *sql.Stmt + updateGraphQLDeltaStmt *sql.Stmt + updateGraphQLHeaderStmt *sql.Stmt updateHTTPStmt *sql.Stmt updateHTTPAssertStmt *sql.Stmt updateHTTPAssertDeltaStmt *sql.Stmt @@ -2891,6 +3296,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { cleanupOrphanedFlowNodeConditionStmt: q.cleanupOrphanedFlowNodeConditionStmt, cleanupOrphanedFlowNodeForStmt: q.cleanupOrphanedFlowNodeForStmt, cleanupOrphanedFlowNodeForEachStmt: q.cleanupOrphanedFlowNodeForEachStmt, + cleanupOrphanedFlowNodeGraphQLStmt: q.cleanupOrphanedFlowNodeGraphQLStmt, cleanupOrphanedFlowNodeHttpStmt: q.cleanupOrphanedFlowNodeHttpStmt, cleanupOrphanedFlowNodeJsStmt: q.cleanupOrphanedFlowNodeJsStmt, cleanupOrphanedNodeExecutionsStmt: q.cleanupOrphanedNodeExecutionsStmt, @@ -2908,6 +3314,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { createFlowNodeConditionStmt: q.createFlowNodeConditionStmt, createFlowNodeForStmt: q.createFlowNodeForStmt, createFlowNodeForEachStmt: q.createFlowNodeForEachStmt, + createFlowNodeGraphQLStmt: q.createFlowNodeGraphQLStmt, createFlowNodeHTTPStmt: q.createFlowNodeHTTPStmt, createFlowNodeJsStmt: q.createFlowNodeJsStmt, createFlowNodeMemoryStmt: q.createFlowNodeMemoryStmt, @@ -2917,6 +3324,14 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { createFlowVariableStmt: q.createFlowVariableStmt, 
createFlowVariableBulkStmt: q.createFlowVariableBulkStmt, createFlowsBulkStmt: q.createFlowsBulkStmt, + createGraphQLStmt: q.createGraphQLStmt, + createGraphQLAssertStmt: q.createGraphQLAssertStmt, + createGraphQLHeaderStmt: q.createGraphQLHeaderStmt, + createGraphQLResponseStmt: q.createGraphQLResponseStmt, + createGraphQLResponseAssertStmt: q.createGraphQLResponseAssertStmt, + createGraphQLResponseHeaderStmt: q.createGraphQLResponseHeaderStmt, + createGraphQLResponseHeaderBulkStmt: q.createGraphQLResponseHeaderBulkStmt, + createGraphQLVersionStmt: q.createGraphQLVersionStmt, createHTTPStmt: q.createHTTPStmt, createHTTPAssertStmt: q.createHTTPAssertStmt, createHTTPAssertBulkStmt: q.createHTTPAssertBulkStmt, @@ -2955,11 +3370,17 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { deleteFlowNodeConditionStmt: q.deleteFlowNodeConditionStmt, deleteFlowNodeForStmt: q.deleteFlowNodeForStmt, deleteFlowNodeForEachStmt: q.deleteFlowNodeForEachStmt, + deleteFlowNodeGraphQLStmt: q.deleteFlowNodeGraphQLStmt, deleteFlowNodeHTTPStmt: q.deleteFlowNodeHTTPStmt, deleteFlowNodeJsStmt: q.deleteFlowNodeJsStmt, deleteFlowNodeMemoryStmt: q.deleteFlowNodeMemoryStmt, deleteFlowTagStmt: q.deleteFlowTagStmt, deleteFlowVariableStmt: q.deleteFlowVariableStmt, + deleteGraphQLStmt: q.deleteGraphQLStmt, + deleteGraphQLAssertStmt: q.deleteGraphQLAssertStmt, + deleteGraphQLHeaderStmt: q.deleteGraphQLHeaderStmt, + deleteGraphQLResponseStmt: q.deleteGraphQLResponseStmt, + deleteGraphQLResponseHeaderStmt: q.deleteGraphQLResponseHeaderStmt, deleteHTTPStmt: q.deleteHTTPStmt, deleteHTTPAssertStmt: q.deleteHTTPAssertStmt, deleteHTTPBodyFormStmt: q.deleteHTTPBodyFormStmt, @@ -3014,6 +3435,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getFlowNodeConditionStmt: q.getFlowNodeConditionStmt, getFlowNodeForStmt: q.getFlowNodeForStmt, getFlowNodeForEachStmt: q.getFlowNodeForEachStmt, + getFlowNodeGraphQLStmt: q.getFlowNodeGraphQLStmt, getFlowNodeHTTPStmt: q.getFlowNodeHTTPStmt, getFlowNodeJsStmt: 
q.getFlowNodeJsStmt, getFlowNodeMemoryStmt: q.getFlowNodeMemoryStmt, @@ -3028,6 +3450,28 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { getFlowVariablesByFlowIDsStmt: q.getFlowVariablesByFlowIDsStmt, getFlowsByVersionParentIDStmt: q.getFlowsByVersionParentIDStmt, getFlowsByWorkspaceIDStmt: q.getFlowsByWorkspaceIDStmt, + getGraphQLStmt: q.getGraphQLStmt, + getGraphQLAssertStmt: q.getGraphQLAssertStmt, + getGraphQLAssertDeltasByParentIDStmt: q.getGraphQLAssertDeltasByParentIDStmt, + getGraphQLAssertDeltasByWorkspaceIDStmt: q.getGraphQLAssertDeltasByWorkspaceIDStmt, + getGraphQLAssertsByGraphQLIDStmt: q.getGraphQLAssertsByGraphQLIDStmt, + getGraphQLAssertsByIDsStmt: q.getGraphQLAssertsByIDsStmt, + getGraphQLDeltasByParentIDStmt: q.getGraphQLDeltasByParentIDStmt, + getGraphQLDeltasByWorkspaceIDStmt: q.getGraphQLDeltasByWorkspaceIDStmt, + getGraphQLHeaderDeltasByParentIDStmt: q.getGraphQLHeaderDeltasByParentIDStmt, + getGraphQLHeaderDeltasByWorkspaceIDStmt: q.getGraphQLHeaderDeltasByWorkspaceIDStmt, + getGraphQLHeadersStmt: q.getGraphQLHeadersStmt, + getGraphQLHeadersByIDsStmt: q.getGraphQLHeadersByIDsStmt, + getGraphQLResponseStmt: q.getGraphQLResponseStmt, + getGraphQLResponseAssertsByResponseIDStmt: q.getGraphQLResponseAssertsByResponseIDStmt, + getGraphQLResponseAssertsByWorkspaceIDStmt: q.getGraphQLResponseAssertsByWorkspaceIDStmt, + getGraphQLResponseHeadersByResponseIDStmt: q.getGraphQLResponseHeadersByResponseIDStmt, + getGraphQLResponseHeadersByWorkspaceIDStmt: q.getGraphQLResponseHeadersByWorkspaceIDStmt, + getGraphQLResponsesByGraphQLIDStmt: q.getGraphQLResponsesByGraphQLIDStmt, + getGraphQLResponsesByWorkspaceIDStmt: q.getGraphQLResponsesByWorkspaceIDStmt, + getGraphQLVersionsByGraphQLIDStmt: q.getGraphQLVersionsByGraphQLIDStmt, + getGraphQLWorkspaceIDStmt: q.getGraphQLWorkspaceIDStmt, + getGraphQLsByWorkspaceIDStmt: q.getGraphQLsByWorkspaceIDStmt, getHTTPStmt: q.getHTTPStmt, getHTTPAssertStmt: q.getHTTPAssertStmt, getHTTPAssertsByHttpIDStmt: 
q.getHTTPAssertsByHttpIDStmt, @@ -3127,6 +3571,7 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { updateFlowNodeConditionStmt: q.updateFlowNodeConditionStmt, updateFlowNodeForStmt: q.updateFlowNodeForStmt, updateFlowNodeForEachStmt: q.updateFlowNodeForEachStmt, + updateFlowNodeGraphQLStmt: q.updateFlowNodeGraphQLStmt, updateFlowNodeHTTPStmt: q.updateFlowNodeHTTPStmt, updateFlowNodeIDMappingStmt: q.updateFlowNodeIDMappingStmt, updateFlowNodeJsStmt: q.updateFlowNodeJsStmt, @@ -3134,6 +3579,11 @@ func (q *Queries) WithTx(tx *sql.Tx) *Queries { updateFlowNodeStateStmt: q.updateFlowNodeStateStmt, updateFlowVariableStmt: q.updateFlowVariableStmt, updateFlowVariableOrderStmt: q.updateFlowVariableOrderStmt, + updateGraphQLStmt: q.updateGraphQLStmt, + updateGraphQLAssertStmt: q.updateGraphQLAssertStmt, + updateGraphQLAssertDeltaStmt: q.updateGraphQLAssertDeltaStmt, + updateGraphQLDeltaStmt: q.updateGraphQLDeltaStmt, + updateGraphQLHeaderStmt: q.updateGraphQLHeaderStmt, updateHTTPStmt: q.updateHTTPStmt, updateHTTPAssertStmt: q.updateHTTPAssertStmt, updateHTTPAssertDeltaStmt: q.updateHTTPAssertDeltaStmt, diff --git a/packages/db/pkg/sqlc/gen/flow.sql.go b/packages/db/pkg/sqlc/gen/flow.sql.go index 2c673455c..8398c387c 100644 --- a/packages/db/pkg/sqlc/gen/flow.sql.go +++ b/packages/db/pkg/sqlc/gen/flow.sql.go @@ -52,6 +52,15 @@ func (q *Queries) CleanupOrphanedFlowNodeForEach(ctx context.Context) error { return err } +const cleanupOrphanedFlowNodeGraphQL = `-- name: CleanupOrphanedFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id NOT IN (SELECT id FROM flow_node) +` + +func (q *Queries) CleanupOrphanedFlowNodeGraphQL(ctx context.Context) error { + _, err := q.exec(ctx, q.cleanupOrphanedFlowNodeGraphQLStmt, cleanupOrphanedFlowNodeGraphQL) + return err +} + const cleanupOrphanedFlowNodeHttp = `-- name: CleanupOrphanedFlowNodeHttp :exec DELETE FROM flow_node_http WHERE flow_node_id NOT IN (SELECT id FROM flow_node) ` @@ -230,6 +239,21 @@ func (q *Queries) 
CreateFlowNodeForEach(ctx context.Context, arg CreateFlowNodeF return err } +const createFlowNodeGraphQL = `-- name: CreateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) +` + +type CreateFlowNodeGraphQLParams struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + +func (q *Queries) CreateFlowNodeGraphQL(ctx context.Context, arg CreateFlowNodeGraphQLParams) error { + _, err := q.exec(ctx, q.createFlowNodeGraphQLStmt, createFlowNodeGraphQL, arg.FlowNodeID, arg.GraphqlID, arg.DeltaGraphqlID) + return err +} + const createFlowNodeHTTP = `-- name: CreateFlowNodeHTTP :exec INSERT INTO flow_node_http ( @@ -870,10 +894,10 @@ func (q *Queries) CreateMigration(ctx context.Context, arg CreateMigrationParams const createNodeExecution = `-- name: CreateNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) -RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type CreateNodeExecutionParams struct { @@ -887,6 +911,7 @@ type CreateNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } @@ -902,6 +927,7 @@ func (q *Queries) CreateNodeExecution(ctx context.Context, arg CreateNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, ) var i NodeExecution @@ -916,6 +942,7 @@ func (q *Queries) CreateNodeExecution(ctx context.Context, arg CreateNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -1012,6 +1039,15 @@ func (q *Queries) DeleteFlowNodeForEach(ctx context.Context, flowNodeID idwrap.I return err } +const deleteFlowNodeGraphQL = `-- name: DeleteFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id = ? +` + +func (q *Queries) DeleteFlowNodeGraphQL(ctx context.Context, flowNodeID idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteFlowNodeGraphQLStmt, deleteFlowNodeGraphQL, flowNodeID) + return err +} + const deleteFlowNodeHTTP = `-- name: DeleteFlowNodeHTTP :exec DELETE FROM flow_node_http WHERE @@ -1499,6 +1535,25 @@ func (q *Queries) GetFlowNodeForEach(ctx context.Context, flowNodeID idwrap.IDWr return i, err } +const getFlowNodeGraphQL = `-- name: GetFlowNodeGraphQL :one +SELECT + flow_node_id, + graphql_id, + delta_graphql_id +FROM + flow_node_graphql +WHERE + flow_node_id = ? 
+LIMIT 1 +` + +func (q *Queries) GetFlowNodeGraphQL(ctx context.Context, flowNodeID idwrap.IDWrap) (FlowNodeGraphql, error) { + row := q.queryRow(ctx, q.getFlowNodeGraphQLStmt, getFlowNodeGraphQL, flowNodeID) + var i FlowNodeGraphql + err := row.Scan(&i.FlowNodeID, &i.GraphqlID, &i.DeltaGraphqlID) + return i, err +} + const getFlowNodeHTTP = `-- name: GetFlowNodeHTTP :one SELECT flow_node_id, @@ -1981,7 +2036,7 @@ func (q *Queries) GetFlowsByWorkspaceID(ctx context.Context, workspaceID idwrap. } const getLatestNodeExecutionByNodeID = `-- name: GetLatestNodeExecutionByNodeID :one -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND completed_at IS NOT NULL ORDER BY completed_at DESC, id DESC @@ -2002,6 +2057,7 @@ func (q *Queries) GetLatestNodeExecutionByNodeID(ctx context.Context, nodeID idw &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -2105,7 +2161,7 @@ func (q *Queries) GetMigrations(ctx context.Context) ([]Migration, error) { } const getNodeExecution = `-- name: GetNodeExecution :one -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE id = ? 
` @@ -2124,13 +2180,14 @@ func (q *Queries) GetNodeExecution(ctx context.Context, id idwrap.IDWrap) (NodeE &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err } const getNodeExecutionsByNodeID = `-- name: GetNodeExecutionsByNodeID :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND completed_at IS NOT NULL ORDER BY completed_at DESC, id DESC @@ -2156,6 +2213,7 @@ func (q *Queries) GetNodeExecutionsByNodeID(ctx context.Context, nodeID idwrap.I &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2237,7 +2295,7 @@ func (q *Queries) GetTagsByWorkspaceID(ctx context.Context, workspaceID idwrap.I } const listNodeExecutions = `-- name: ListNodeExecutions :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? ORDER BY completed_at DESC, id DESC LIMIT ? OFFSET ? 
@@ -2269,6 +2327,7 @@ func (q *Queries) ListNodeExecutions(ctx context.Context, arg ListNodeExecutions &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2285,7 +2344,7 @@ func (q *Queries) ListNodeExecutions(ctx context.Context, arg ListNodeExecutions } const listNodeExecutionsByFlowRun = `-- name: ListNodeExecutionsByFlowRun :many -SELECT ne.id, ne.node_id, ne.name, ne.state, ne.error, ne.input_data, ne.input_data_compress_type, ne.output_data, ne.output_data_compress_type, ne.http_response_id, ne.completed_at FROM node_execution ne +SELECT ne.id, ne.node_id, ne.name, ne.state, ne.error, ne.input_data, ne.input_data_compress_type, ne.output_data, ne.output_data_compress_type, ne.http_response_id, ne.graphql_response_id, ne.completed_at FROM node_execution ne JOIN flow_node fn ON ne.node_id = fn.id WHERE fn.flow_id = ? ORDER BY ne.completed_at DESC, ne.id DESC @@ -2311,6 +2370,7 @@ func (q *Queries) ListNodeExecutionsByFlowRun(ctx context.Context, flowID idwrap &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2327,7 +2387,7 @@ func (q *Queries) ListNodeExecutionsByFlowRun(ctx context.Context, flowID idwrap } const listNodeExecutionsByState = `-- name: ListNodeExecutionsByState :many -SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at FROM node_execution +SELECT id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at FROM node_execution WHERE node_id = ? AND state = ? ORDER BY completed_at DESC, id DESC LIMIT ? OFFSET ? 
@@ -2365,6 +2425,7 @@ func (q *Queries) ListNodeExecutionsByState(ctx context.Context, arg ListNodeExe &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ); err != nil { return nil, err @@ -2554,6 +2615,24 @@ func (q *Queries) UpdateFlowNodeForEach(ctx context.Context, arg UpdateFlowNodeF return err } +const updateFlowNodeGraphQL = `-- name: UpdateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) +ON CONFLICT(flow_node_id) DO UPDATE SET + graphql_id = excluded.graphql_id, + delta_graphql_id = excluded.delta_graphql_id +` + +type UpdateFlowNodeGraphQLParams struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + +func (q *Queries) UpdateFlowNodeGraphQL(ctx context.Context, arg UpdateFlowNodeGraphQLParams) error { + _, err := q.exec(ctx, q.updateFlowNodeGraphQLStmt, updateFlowNodeGraphQL, arg.FlowNodeID, arg.GraphqlID, arg.DeltaGraphqlID) + return err +} + const updateFlowNodeHTTP = `-- name: UpdateFlowNodeHTTP :exec INSERT INTO flow_node_http ( flow_node_id, @@ -2682,10 +2761,10 @@ func (q *Queries) UpdateFlowVariableOrder(ctx context.Context, arg UpdateFlowVar const updateNodeExecution = `-- name: UpdateNodeExecution :one UPDATE node_execution -SET state = ?, error = ?, output_data = ?, - output_data_compress_type = ?, http_response_id = ?, completed_at = ? +SET state = ?, error = ?, output_data = ?, + output_data_compress_type = ?, http_response_id = ?, graphql_response_id = ?, completed_at = ? WHERE id = ? 
-RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type UpdateNodeExecutionParams struct { @@ -2694,6 +2773,7 @@ type UpdateNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 ID idwrap.IDWrap } @@ -2705,6 +2785,7 @@ func (q *Queries) UpdateNodeExecution(ctx context.Context, arg UpdateNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, arg.ID, ) @@ -2720,6 +2801,7 @@ func (q *Queries) UpdateNodeExecution(ctx context.Context, arg UpdateNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err @@ -2764,19 +2846,20 @@ func (q *Queries) UpdateTag(ctx context.Context, arg UpdateTagParams) error { const upsertNodeExecution = `-- name: UpsertNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT(id) DO UPDATE SET state = excluded.state, - error = excluded.error, + error = excluded.error, input_data = excluded.input_data, input_data_compress_type = excluded.input_data_compress_type, output_data = excluded.output_data, output_data_compress_type = excluded.output_data_compress_type, http_response_id = excluded.http_response_id, + graphql_response_id = excluded.graphql_response_id, completed_at = excluded.completed_at -RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, completed_at +RETURNING id, node_id, name, state, error, input_data, input_data_compress_type, output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ` type UpsertNodeExecutionParams struct { @@ -2790,6 +2873,7 @@ type UpsertNodeExecutionParams struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } @@ -2805,6 +2889,7 @@ func (q *Queries) UpsertNodeExecution(ctx context.Context, arg UpsertNodeExecuti arg.OutputData, arg.OutputDataCompressType, arg.HttpResponseID, + arg.GraphqlResponseID, arg.CompletedAt, ) var i NodeExecution @@ -2819,6 +2904,7 @@ func (q *Queries) UpsertNodeExecution(ctx context.Context, arg UpsertNodeExecuti &i.OutputData, &i.OutputDataCompressType, &i.HttpResponseID, + &i.GraphqlResponseID, &i.CompletedAt, ) return i, err diff --git a/packages/db/pkg/sqlc/gen/graphql.sql.go b/packages/db/pkg/sqlc/gen/graphql.sql.go new file mode 100644 index 000000000..dc1660fcd --- /dev/null +++ b/packages/db/pkg/sqlc/gen/graphql.sql.go @@ -0,0 +1,1749 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: graphql.sql + +package gen + +import ( + "context" + "strings" + "time" + + idwrap "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +const createGraphQL = `-- name: CreateGraphQL :exec +INSERT INTO graphql ( + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLParams struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlID []byte + IsDelta bool + IsSnapshot bool + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} +} + +func (q *Queries) CreateGraphQL(ctx context.Context, arg CreateGraphQLParams) error { + _, err := q.exec(ctx, q.createGraphQLStmt, createGraphQL, + arg.ID, + arg.WorkspaceID, + arg.FolderID, + arg.Name, + arg.Url, + arg.Query, + arg.Variables, + arg.Description, + arg.LastRunAt, + arg.CreatedAt, + arg.UpdatedAt, + arg.ParentGraphqlID, + arg.IsDelta, + arg.IsSnapshot, + arg.DeltaName, + arg.DeltaUrl, + arg.DeltaQuery, + arg.DeltaVariables, + arg.DeltaDescription, + ) + return err +} + +const createGraphQLAssert = `-- name: CreateGraphQLAssert :exec +INSERT INTO graphql_assert ( + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+` + +type CreateGraphQLAssertParams struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) CreateGraphQLAssert(ctx context.Context, arg CreateGraphQLAssertParams) error { + _, err := q.exec(ctx, q.createGraphQLAssertStmt, createGraphQLAssert, + arg.ID, + arg.GraphqlID, + arg.Value, + arg.Enabled, + arg.Description, + arg.DisplayOrder, + arg.ParentGraphqlAssertID, + arg.IsDelta, + arg.DeltaValue, + arg.DeltaEnabled, + arg.DeltaDescription, + arg.DeltaDisplayOrder, + arg.CreatedAt, + arg.UpdatedAt, + ) + return err +} + +const createGraphQLHeader = `-- name: CreateGraphQLHeader :exec +INSERT INTO graphql_header ( + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+` + +type CreateGraphQLHeaderParams struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlHeaderID []byte + IsDelta bool + DeltaHeaderKey interface{} + DeltaHeaderValue interface{} + DeltaDescription interface{} + DeltaEnabled interface{} + DeltaDisplayOrder interface{} +} + +func (q *Queries) CreateGraphQLHeader(ctx context.Context, arg CreateGraphQLHeaderParams) error { + _, err := q.exec(ctx, q.createGraphQLHeaderStmt, createGraphQLHeader, + arg.ID, + arg.GraphqlID, + arg.HeaderKey, + arg.HeaderValue, + arg.Description, + arg.Enabled, + arg.DisplayOrder, + arg.CreatedAt, + arg.UpdatedAt, + arg.ParentGraphqlHeaderID, + arg.IsDelta, + arg.DeltaHeaderKey, + arg.DeltaHeaderValue, + arg.DeltaDescription, + arg.DeltaEnabled, + arg.DeltaDisplayOrder, + ) + return err +} + +const createGraphQLResponse = `-- name: CreateGraphQLResponse :exec +INSERT INTO graphql_response ( + id, graphql_id, status, body, time, duration, size, created_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLResponseParams struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + Status interface{} + Body []byte + Time time.Time + Duration interface{} + Size interface{} + CreatedAt int64 +} + +func (q *Queries) CreateGraphQLResponse(ctx context.Context, arg CreateGraphQLResponseParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseStmt, createGraphQLResponse, + arg.ID, + arg.GraphqlID, + arg.Status, + arg.Body, + arg.Time, + arg.Duration, + arg.Size, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseAssert = `-- name: CreateGraphQLResponseAssert :exec + +INSERT INTO graphql_response_assert ( + id, response_id, value, success, created_at +) +VALUES (?, ?, ?, ?, ?) 
+` + +type CreateGraphQLResponseAssertParams struct { + ID []byte + ResponseID []byte + Value string + Success bool + CreatedAt int64 +} + +// GraphQL Response Assert Queries +func (q *Queries) CreateGraphQLResponseAssert(ctx context.Context, arg CreateGraphQLResponseAssertParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseAssertStmt, createGraphQLResponseAssert, + arg.ID, + arg.ResponseID, + arg.Value, + arg.Success, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseHeader = `-- name: CreateGraphQLResponseHeader :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES (?, ?, ?, ?, ?) +` + +type CreateGraphQLResponseHeaderParams struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 +} + +func (q *Queries) CreateGraphQLResponseHeader(ctx context.Context, arg CreateGraphQLResponseHeaderParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseHeaderStmt, createGraphQLResponseHeader, + arg.ID, + arg.ResponseID, + arg.Key, + arg.Value, + arg.CreatedAt, + ) + return err +} + +const createGraphQLResponseHeaderBulk = `-- name: CreateGraphQLResponseHeaderBulk :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?) 
+` + +type CreateGraphQLResponseHeaderBulkParams struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 + ID_2 idwrap.IDWrap + ResponseID_2 idwrap.IDWrap + Key_2 string + Value_2 string + CreatedAt_2 int64 + ID_3 idwrap.IDWrap + ResponseID_3 idwrap.IDWrap + Key_3 string + Value_3 string + CreatedAt_3 int64 + ID_4 idwrap.IDWrap + ResponseID_4 idwrap.IDWrap + Key_4 string + Value_4 string + CreatedAt_4 int64 + ID_5 idwrap.IDWrap + ResponseID_5 idwrap.IDWrap + Key_5 string + Value_5 string + CreatedAt_5 int64 + ID_6 idwrap.IDWrap + ResponseID_6 idwrap.IDWrap + Key_6 string + Value_6 string + CreatedAt_6 int64 + ID_7 idwrap.IDWrap + ResponseID_7 idwrap.IDWrap + Key_7 string + Value_7 string + CreatedAt_7 int64 + ID_8 idwrap.IDWrap + ResponseID_8 idwrap.IDWrap + Key_8 string + Value_8 string + CreatedAt_8 int64 + ID_9 idwrap.IDWrap + ResponseID_9 idwrap.IDWrap + Key_9 string + Value_9 string + CreatedAt_9 int64 + ID_10 idwrap.IDWrap + ResponseID_10 idwrap.IDWrap + Key_10 string + Value_10 string + CreatedAt_10 int64 +} + +func (q *Queries) CreateGraphQLResponseHeaderBulk(ctx context.Context, arg CreateGraphQLResponseHeaderBulkParams) error { + _, err := q.exec(ctx, q.createGraphQLResponseHeaderBulkStmt, createGraphQLResponseHeaderBulk, + arg.ID, + arg.ResponseID, + arg.Key, + arg.Value, + arg.CreatedAt, + arg.ID_2, + arg.ResponseID_2, + arg.Key_2, + arg.Value_2, + arg.CreatedAt_2, + arg.ID_3, + arg.ResponseID_3, + arg.Key_3, + arg.Value_3, + arg.CreatedAt_3, + arg.ID_4, + arg.ResponseID_4, + arg.Key_4, + arg.Value_4, + arg.CreatedAt_4, + arg.ID_5, + arg.ResponseID_5, + arg.Key_5, + arg.Value_5, + arg.CreatedAt_5, + arg.ID_6, + arg.ResponseID_6, + arg.Key_6, + arg.Value_6, + arg.CreatedAt_6, + arg.ID_7, + arg.ResponseID_7, + arg.Key_7, + arg.Value_7, + arg.CreatedAt_7, + arg.ID_8, + arg.ResponseID_8, + arg.Key_8, + arg.Value_8, + arg.CreatedAt_8, + arg.ID_9, + arg.ResponseID_9, + arg.Key_9, + arg.Value_9, + arg.CreatedAt_9, + 
arg.ID_10, + arg.ResponseID_10, + arg.Key_10, + arg.Value_10, + arg.CreatedAt_10, + ) + return err +} + +const createGraphQLVersion = `-- name: CreateGraphQLVersion :exec + +INSERT INTO graphql_version ( + id, graphql_id, version_name, version_description, is_active, created_at, created_by +) +VALUES (?, ?, ?, ?, ?, ?, ?) +` + +type CreateGraphQLVersionParams struct { + ID []byte + GraphqlID []byte + VersionName string + VersionDescription string + IsActive bool + CreatedAt int64 + CreatedBy []byte +} + +// GraphQL Version Queries +func (q *Queries) CreateGraphQLVersion(ctx context.Context, arg CreateGraphQLVersionParams) error { + _, err := q.exec(ctx, q.createGraphQLVersionStmt, createGraphQLVersion, + arg.ID, + arg.GraphqlID, + arg.VersionName, + arg.VersionDescription, + arg.IsActive, + arg.CreatedAt, + arg.CreatedBy, + ) + return err +} + +const deleteGraphQL = `-- name: DeleteGraphQL :exec +DELETE FROM graphql +WHERE id = ? +` + +func (q *Queries) DeleteGraphQL(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLStmt, deleteGraphQL, id) + return err +} + +const deleteGraphQLAssert = `-- name: DeleteGraphQLAssert :exec +DELETE FROM graphql_assert +WHERE id = ? +` + +func (q *Queries) DeleteGraphQLAssert(ctx context.Context, id []byte) error { + _, err := q.exec(ctx, q.deleteGraphQLAssertStmt, deleteGraphQLAssert, id) + return err +} + +const deleteGraphQLHeader = `-- name: DeleteGraphQLHeader :exec +DELETE FROM graphql_header +WHERE id = ? +` + +func (q *Queries) DeleteGraphQLHeader(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLHeaderStmt, deleteGraphQLHeader, id) + return err +} + +const deleteGraphQLResponse = `-- name: DeleteGraphQLResponse :exec +DELETE FROM graphql_response WHERE id = ? 
+` + +func (q *Queries) DeleteGraphQLResponse(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLResponseStmt, deleteGraphQLResponse, id) + return err +} + +const deleteGraphQLResponseHeader = `-- name: DeleteGraphQLResponseHeader :exec +DELETE FROM graphql_response_header WHERE id = ? +` + +func (q *Queries) DeleteGraphQLResponseHeader(ctx context.Context, id idwrap.IDWrap) error { + _, err := q.exec(ctx, q.deleteGraphQLResponseHeaderStmt, deleteGraphQLResponseHeader, id) + return err +} + +const getGraphQL = `-- name: GetGraphQL :one + +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE id = ? LIMIT 1 +` + +// GraphQL Core Queries +func (q *Queries) GetGraphQL(ctx context.Context, id idwrap.IDWrap) (Graphql, error) { + row := q.queryRow(ctx, q.getGraphQLStmt, getGraphQL, id) + var i Graphql + err := row.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ) + return i, err +} + +const getGraphQLAssert = `-- name: GetGraphQLAssert :one + +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id = ? 
+LIMIT 1 +` + +type GetGraphQLAssertRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +// GraphQL Assert Queries +func (q *Queries) GetGraphQLAssert(ctx context.Context, id []byte) (GetGraphQLAssertRow, error) { + row := q.queryRow(ctx, q.getGraphQLAssertStmt, getGraphQLAssert, id) + var i GetGraphQLAssertRow + err := row.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getGraphQLAssertDeltasByParentID = `-- name: GetGraphQLAssertDeltasByParentID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE parent_graphql_assert_id = ? 
AND is_delta = TRUE +ORDER BY display_order +` + +type GetGraphQLAssertDeltasByParentIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertDeltasByParentID(ctx context.Context, parentGraphqlAssertID []byte) ([]GetGraphQLAssertDeltasByParentIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertDeltasByParentIDStmt, getGraphQLAssertDeltasByParentID, parentGraphqlAssertID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertDeltasByParentIDRow{} + for rows.Next() { + var i GetGraphQLAssertDeltasByParentIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertDeltasByWorkspaceID = `-- name: GetGraphQLAssertDeltasByWorkspaceID :many +SELECT + ga.id, + ga.graphql_id, + ga.value, + ga.enabled, + ga.description, + ga.display_order, + ga.parent_graphql_assert_id, + ga.is_delta, + ga.delta_value, + ga.delta_enabled, + ga.delta_description, + ga.delta_display_order, + ga.created_at, + ga.updated_at +FROM graphql_assert ga +INNER JOIN graphql g ON ga.graphql_id = g.id +WHERE g.workspace_id = ? 
AND ga.is_delta = TRUE +ORDER BY ga.display_order +` + +type GetGraphQLAssertDeltasByWorkspaceIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GetGraphQLAssertDeltasByWorkspaceIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertDeltasByWorkspaceIDStmt, getGraphQLAssertDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertDeltasByWorkspaceIDRow{} + for rows.Next() { + var i GetGraphQLAssertDeltasByWorkspaceIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertsByGraphQLID = `-- name: GetGraphQLAssertsByGraphQLID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE graphql_id = ? 
+ORDER BY display_order +` + +type GetGraphQLAssertsByGraphQLIDRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertsByGraphQLID(ctx context.Context, graphqlID []byte) ([]GetGraphQLAssertsByGraphQLIDRow, error) { + rows, err := q.query(ctx, q.getGraphQLAssertsByGraphQLIDStmt, getGraphQLAssertsByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertsByGraphQLIDRow{} + for rows.Next() { + var i GetGraphQLAssertsByGraphQLIDRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLAssertsByIDs = `-- name: GetGraphQLAssertsByIDs :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id IN (/*SLICE:ids*/?) 
+` + +type GetGraphQLAssertsByIDsRow struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + CreatedAt int64 + UpdatedAt int64 +} + +func (q *Queries) GetGraphQLAssertsByIDs(ctx context.Context, ids [][]byte) ([]GetGraphQLAssertsByIDsRow, error) { + query := getGraphQLAssertsByIDs + var queryParams []interface{} + if len(ids) > 0 { + for _, v := range ids { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(ids))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1) + } + rows, err := q.query(ctx, nil, query, queryParams...) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GetGraphQLAssertsByIDsRow{} + for rows.Next() { + var i GetGraphQLAssertsByIDsRow + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Value, + &i.Enabled, + &i.Description, + &i.DisplayOrder, + &i.ParentGraphqlAssertID, + &i.IsDelta, + &i.DeltaValue, + &i.DeltaEnabled, + &i.DeltaDescription, + &i.DeltaDisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLDeltasByParentID = `-- name: GetGraphQLDeltasByParentID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE parent_graphql_id = ? 
AND is_delta = TRUE +ORDER BY updated_at DESC +` + +func (q *Queries) GetGraphQLDeltasByParentID(ctx context.Context, parentGraphqlID []byte) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLDeltasByParentIDStmt, getGraphQLDeltasByParentID, parentGraphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLDeltasByWorkspaceID = `-- name: GetGraphQLDeltasByWorkspaceID :many + +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? 
AND is_delta = TRUE +ORDER BY updated_at DESC +` + +// GraphQL Delta Queries +func (q *Queries) GetGraphQLDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLDeltasByWorkspaceIDStmt, getGraphQLDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaderDeltasByParentID = `-- name: GetGraphQLHeaderDeltasByParentID :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE parent_graphql_header_id = ? 
AND is_delta = TRUE +ORDER BY display_order +` + +func (q *Queries) GetGraphQLHeaderDeltasByParentID(ctx context.Context, parentGraphqlHeaderID []byte) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeaderDeltasByParentIDStmt, getGraphQLHeaderDeltasByParentID, parentGraphqlHeaderID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaderDeltasByWorkspaceID = `-- name: GetGraphQLHeaderDeltasByWorkspaceID :many + +SELECT + h.id, h.graphql_id, h.header_key, h.header_value, h.description, + h.enabled, h.display_order, h.created_at, h.updated_at, + h.parent_graphql_header_id, h.is_delta, + h.delta_header_key, h.delta_header_value, h.delta_description, h.delta_enabled, h.delta_display_order +FROM graphql_header h +JOIN graphql g ON h.graphql_id = g.id +WHERE g.workspace_id = ? 
AND h.is_delta = TRUE +ORDER BY h.updated_at DESC +` + +// GraphQL Header Delta Queries +func (q *Queries) GetGraphQLHeaderDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeaderDeltasByWorkspaceIDStmt, getGraphQLHeaderDeltasByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeaders = `-- name: GetGraphQLHeaders :many + +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE graphql_id = ? 
+ORDER BY display_order +` + +// GraphQL Header Queries +func (q *Queries) GetGraphQLHeaders(ctx context.Context, graphqlID idwrap.IDWrap) ([]GraphqlHeader, error) { + rows, err := q.query(ctx, q.getGraphQLHeadersStmt, getGraphQLHeaders, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLHeadersByIDs = `-- name: GetGraphQLHeadersByIDs :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE id IN (/*SLICE:ids*/?) +` + +func (q *Queries) GetGraphQLHeadersByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]GraphqlHeader, error) { + query := getGraphQLHeadersByIDs + var queryParams []interface{} + if len(ids) > 0 { + for _, v := range ids { + queryParams = append(queryParams, v) + } + query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(ids))[1:], 1) + } else { + query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1) + } + rows, err := q.query(ctx, nil, query, queryParams...) 
+ if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlHeader{} + for rows.Next() { + var i GraphqlHeader + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.HeaderKey, + &i.HeaderValue, + &i.Description, + &i.Enabled, + &i.DisplayOrder, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlHeaderID, + &i.IsDelta, + &i.DeltaHeaderKey, + &i.DeltaHeaderValue, + &i.DeltaDescription, + &i.DeltaEnabled, + &i.DeltaDisplayOrder, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponse = `-- name: GetGraphQLResponse :one + +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE id = ? LIMIT 1 +` + +// GraphQL Response Queries +func (q *Queries) GetGraphQLResponse(ctx context.Context, id idwrap.IDWrap) (GraphqlResponse, error) { + row := q.queryRow(ctx, q.getGraphQLResponseStmt, getGraphQLResponse, id) + var i GraphqlResponse + err := row.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ) + return i, err +} + +const getGraphQLResponseAssertsByResponseID = `-- name: GetGraphQLResponseAssertsByResponseID :many +SELECT id, response_id, value, success, created_at +FROM graphql_response_assert +WHERE response_id = ? 
+ORDER BY created_at +` + +func (q *Queries) GetGraphQLResponseAssertsByResponseID(ctx context.Context, responseID []byte) ([]GraphqlResponseAssert, error) { + rows, err := q.query(ctx, q.getGraphQLResponseAssertsByResponseIDStmt, getGraphQLResponseAssertsByResponseID, responseID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseAssert{} + for rows.Next() { + var i GraphqlResponseAssert + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Value, + &i.Success, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseAssertsByWorkspaceID = `-- name: GetGraphQLResponseAssertsByWorkspaceID :many +SELECT + gra.id, + gra.response_id, + gra.value, + gra.success, + gra.created_at +FROM graphql_response_assert gra +INNER JOIN graphql_response gr ON gra.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? 
+` + +func (q *Queries) GetGraphQLResponseAssertsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponseAssert, error) { + rows, err := q.query(ctx, q.getGraphQLResponseAssertsByWorkspaceIDStmt, getGraphQLResponseAssertsByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseAssert{} + for rows.Next() { + var i GraphqlResponseAssert + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Value, + &i.Success, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseHeadersByResponseID = `-- name: GetGraphQLResponseHeadersByResponseID :many + +SELECT + id, response_id, key, value, created_at +FROM graphql_response_header +WHERE response_id = ? +ORDER BY key +` + +// GraphQL Response Header Queries +func (q *Queries) GetGraphQLResponseHeadersByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]GraphqlResponseHeader, error) { + rows, err := q.query(ctx, q.getGraphQLResponseHeadersByResponseIDStmt, getGraphQLResponseHeadersByResponseID, responseID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseHeader{} + for rows.Next() { + var i GraphqlResponseHeader + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Key, + &i.Value, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponseHeadersByWorkspaceID = `-- name: GetGraphQLResponseHeadersByWorkspaceID :many +SELECT + grh.id, grh.response_id, grh.key, grh.value, grh.created_at +FROM graphql_response_header grh +INNER JOIN graphql_response gr ON grh.response_id = gr.id +INNER JOIN graphql g ON 
gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC, grh.key +` + +func (q *Queries) GetGraphQLResponseHeadersByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponseHeader, error) { + rows, err := q.query(ctx, q.getGraphQLResponseHeadersByWorkspaceIDStmt, getGraphQLResponseHeadersByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponseHeader{} + for rows.Next() { + var i GraphqlResponseHeader + if err := rows.Scan( + &i.ID, + &i.ResponseID, + &i.Key, + &i.Value, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponsesByGraphQLID = `-- name: GetGraphQLResponsesByGraphQLID :many +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE graphql_id = ? 
+ORDER BY time DESC +` + +func (q *Queries) GetGraphQLResponsesByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]GraphqlResponse, error) { + rows, err := q.query(ctx, q.getGraphQLResponsesByGraphQLIDStmt, getGraphQLResponsesByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponse{} + for rows.Next() { + var i GraphqlResponse + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLResponsesByWorkspaceID = `-- name: GetGraphQLResponsesByWorkspaceID :many +SELECT + gr.id, gr.graphql_id, gr.status, gr.body, gr.time, + gr.duration, gr.size, gr.created_at +FROM graphql_response gr +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC +` + +func (q *Queries) GetGraphQLResponsesByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]GraphqlResponse, error) { + rows, err := q.query(ctx, q.getGraphQLResponsesByWorkspaceIDStmt, getGraphQLResponsesByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlResponse{} + for rows.Next() { + var i GraphqlResponse + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.Status, + &i.Body, + &i.Time, + &i.Duration, + &i.Size, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLVersionsByGraphQLID = `-- name: GetGraphQLVersionsByGraphQLID :many +SELECT id, graphql_id, version_name, version_description, is_active, created_at, created_by +FROM graphql_version +WHERE graphql_id = ? 
+ORDER BY created_at DESC +` + +func (q *Queries) GetGraphQLVersionsByGraphQLID(ctx context.Context, graphqlID []byte) ([]GraphqlVersion, error) { + rows, err := q.query(ctx, q.getGraphQLVersionsByGraphQLIDStmt, getGraphQLVersionsByGraphQLID, graphqlID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GraphqlVersion{} + for rows.Next() { + var i GraphqlVersion + if err := rows.Scan( + &i.ID, + &i.GraphqlID, + &i.VersionName, + &i.VersionDescription, + &i.IsActive, + &i.CreatedAt, + &i.CreatedBy, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getGraphQLWorkspaceID = `-- name: GetGraphQLWorkspaceID :one +SELECT workspace_id +FROM graphql +WHERE id = ? +LIMIT 1 +` + +func (q *Queries) GetGraphQLWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + row := q.queryRow(ctx, q.getGraphQLWorkspaceIDStmt, getGraphQLWorkspaceID, id) + var workspace_id idwrap.IDWrap + err := row.Scan(&workspace_id) + return workspace_id, err +} + +const getGraphQLsByWorkspaceID = `-- name: GetGraphQLsByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? 
+ORDER BY updated_at DESC +` + +func (q *Queries) GetGraphQLsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]Graphql, error) { + rows, err := q.query(ctx, q.getGraphQLsByWorkspaceIDStmt, getGraphQLsByWorkspaceID, workspaceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []Graphql{} + for rows.Next() { + var i Graphql + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.FolderID, + &i.Name, + &i.Url, + &i.Query, + &i.Variables, + &i.Description, + &i.LastRunAt, + &i.CreatedAt, + &i.UpdatedAt, + &i.ParentGraphqlID, + &i.IsDelta, + &i.IsSnapshot, + &i.DeltaName, + &i.DeltaUrl, + &i.DeltaQuery, + &i.DeltaVariables, + &i.DeltaDescription, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateGraphQL = `-- name: UpdateGraphQL :exec +UPDATE graphql +SET + name = ?, + url = ?, + query = ?, + variables = ?, + description = ?, + last_run_at = COALESCE(?, last_run_at), + updated_at = unixepoch() +WHERE id = ? +` + +type UpdateGraphQLParams struct { + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQL(ctx context.Context, arg UpdateGraphQLParams) error { + _, err := q.exec(ctx, q.updateGraphQLStmt, updateGraphQL, + arg.Name, + arg.Url, + arg.Query, + arg.Variables, + arg.Description, + arg.LastRunAt, + arg.ID, + ) + return err +} + +const updateGraphQLAssert = `-- name: UpdateGraphQLAssert :exec +UPDATE graphql_assert +SET + value = ?, + enabled = ?, + description = ?, + display_order = ?, + updated_at = ? +WHERE id = ? 
+` + +type UpdateGraphQLAssertParams struct { + Value string + Enabled bool + Description string + DisplayOrder float64 + UpdatedAt int64 + ID []byte +} + +func (q *Queries) UpdateGraphQLAssert(ctx context.Context, arg UpdateGraphQLAssertParams) error { + _, err := q.exec(ctx, q.updateGraphQLAssertStmt, updateGraphQLAssert, + arg.Value, + arg.Enabled, + arg.Description, + arg.DisplayOrder, + arg.UpdatedAt, + arg.ID, + ) + return err +} + +const updateGraphQLAssertDelta = `-- name: UpdateGraphQLAssertDelta :exec +UPDATE graphql_assert +SET + delta_value = ?, + delta_enabled = ?, + delta_description = ?, + delta_display_order = ?, + updated_at = ? +WHERE id = ? +` + +type UpdateGraphQLAssertDeltaParams struct { + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} + UpdatedAt int64 + ID []byte +} + +func (q *Queries) UpdateGraphQLAssertDelta(ctx context.Context, arg UpdateGraphQLAssertDeltaParams) error { + _, err := q.exec(ctx, q.updateGraphQLAssertDeltaStmt, updateGraphQLAssertDelta, + arg.DeltaValue, + arg.DeltaEnabled, + arg.DeltaDescription, + arg.DeltaDisplayOrder, + arg.UpdatedAt, + arg.ID, + ) + return err +} + +const updateGraphQLDelta = `-- name: UpdateGraphQLDelta :exec +UPDATE graphql +SET + delta_name = ?, + delta_url = ?, + delta_query = ?, + delta_variables = ?, + delta_description = ?, + updated_at = unixepoch() +WHERE id = ? 
+` + +type UpdateGraphQLDeltaParams struct { + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQLDelta(ctx context.Context, arg UpdateGraphQLDeltaParams) error { + _, err := q.exec(ctx, q.updateGraphQLDeltaStmt, updateGraphQLDelta, + arg.DeltaName, + arg.DeltaUrl, + arg.DeltaQuery, + arg.DeltaVariables, + arg.DeltaDescription, + arg.ID, + ) + return err +} + +const updateGraphQLHeader = `-- name: UpdateGraphQLHeader :exec +UPDATE graphql_header +SET + header_key = ?, + header_value = ?, + description = ?, + enabled = ?, + display_order = ?, + updated_at = unixepoch() +WHERE id = ? +` + +type UpdateGraphQLHeaderParams struct { + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + ID idwrap.IDWrap +} + +func (q *Queries) UpdateGraphQLHeader(ctx context.Context, arg UpdateGraphQLHeaderParams) error { + _, err := q.exec(ctx, q.updateGraphQLHeaderStmt, updateGraphQLHeader, + arg.HeaderKey, + arg.HeaderValue, + arg.Description, + arg.Enabled, + arg.DisplayOrder, + arg.ID, + ) + return err +} diff --git a/packages/db/pkg/sqlc/gen/models.go b/packages/db/pkg/sqlc/gen/models.go index f02222c7c..0ce8a7244 100644 --- a/packages/db/pkg/sqlc/gen/models.go +++ b/packages/db/pkg/sqlc/gen/models.go @@ -177,6 +177,12 @@ type FlowNodeForEach struct { Expression string } +type FlowNodeGraphql struct { + FlowNodeID idwrap.IDWrap + GraphqlID idwrap.IDWrap + DeltaGraphqlID []byte +} + type FlowNodeHttp struct { FlowNodeID idwrap.IDWrap HttpID idwrap.IDWrap @@ -211,6 +217,101 @@ type FlowVariable struct { DisplayOrder float64 } +type Graphql struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap + Name string + Url string + Query string + Variables string + Description string + LastRunAt interface{} + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlID []byte + IsDelta bool + 
IsSnapshot bool + DeltaName interface{} + DeltaUrl interface{} + DeltaQuery interface{} + DeltaVariables interface{} + DeltaDescription interface{} +} + +type GraphqlAssert struct { + ID []byte + GraphqlID []byte + Value string + Enabled bool + Description string + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlAssertID []byte + IsDelta bool + DeltaValue interface{} + DeltaEnabled interface{} + DeltaDescription interface{} + DeltaDisplayOrder interface{} +} + +type GraphqlHeader struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + HeaderKey string + HeaderValue string + Description string + Enabled bool + DisplayOrder float64 + CreatedAt int64 + UpdatedAt int64 + ParentGraphqlHeaderID []byte + IsDelta bool + DeltaHeaderKey interface{} + DeltaHeaderValue interface{} + DeltaDescription interface{} + DeltaEnabled interface{} + DeltaDisplayOrder interface{} +} + +type GraphqlResponse struct { + ID idwrap.IDWrap + GraphqlID idwrap.IDWrap + Status interface{} + Body []byte + Time time.Time + Duration interface{} + Size interface{} + CreatedAt int64 +} + +type GraphqlResponseAssert struct { + ID []byte + ResponseID []byte + Value string + Success bool + CreatedAt int64 +} + +type GraphqlResponseHeader struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string + CreatedAt int64 +} + +type GraphqlVersion struct { + ID []byte + GraphqlID []byte + VersionName string + VersionDescription string + IsActive bool + CreatedAt int64 + CreatedBy []byte +} + type Http struct { ID idwrap.IDWrap WorkspaceID idwrap.IDWrap @@ -395,6 +496,7 @@ type NodeExecution struct { OutputData []byte OutputDataCompressType int8 HttpResponseID *idwrap.IDWrap + GraphqlResponseID *idwrap.IDWrap CompletedAt sql.NullInt64 } diff --git a/packages/db/pkg/sqlc/queries/flow.sql b/packages/db/pkg/sqlc/queries/flow.sql index a20589127..43a055cf4 100644 --- a/packages/db/pkg/sqlc/queries/flow.sql +++ b/packages/db/pkg/sqlc/queries/flow.sql @@ -437,6 +437,32 @@ 
DELETE FROM flow_node_http WHERE flow_node_id = ?; +-- name: GetFlowNodeGraphQL :one +SELECT + flow_node_id, + graphql_id, + delta_graphql_id +FROM + flow_node_graphql +WHERE + flow_node_id = ? +LIMIT 1; + +-- name: CreateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?); + +-- name: UpdateFlowNodeGraphQL :exec +INSERT INTO flow_node_graphql (flow_node_id, graphql_id, delta_graphql_id) VALUES (?, ?, ?) +ON CONFLICT(flow_node_id) DO UPDATE SET + graphql_id = excluded.graphql_id, + delta_graphql_id = excluded.delta_graphql_id; + +-- name: DeleteFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id = ?; + +-- name: CleanupOrphanedFlowNodeGraphQL :exec +DELETE FROM flow_node_graphql WHERE flow_node_id NOT IN (SELECT id FROM flow_node); + -- name: GetFlowNodeCondition :one SELECT flow_node_id, @@ -648,32 +674,33 @@ ORDER BY ne.completed_at DESC, ne.id DESC; -- name: CreateNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) RETURNING *; -- name: UpdateNodeExecution :one UPDATE node_execution -SET state = ?, error = ?, output_data = ?, - output_data_compress_type = ?, http_response_id = ?, completed_at = ? +SET state = ?, error = ?, output_data = ?, + output_data_compress_type = ?, http_response_id = ?, graphql_response_id = ?, completed_at = ? WHERE id = ? 
RETURNING *; -- name: UpsertNodeExecution :one INSERT INTO node_execution ( id, node_id, name, state, error, input_data, input_data_compress_type, - output_data, output_data_compress_type, http_response_id, completed_at + output_data, output_data_compress_type, http_response_id, graphql_response_id, completed_at ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(id) DO UPDATE SET state = excluded.state, - error = excluded.error, + error = excluded.error, input_data = excluded.input_data, input_data_compress_type = excluded.input_data_compress_type, output_data = excluded.output_data, output_data_compress_type = excluded.output_data_compress_type, http_response_id = excluded.http_response_id, + graphql_response_id = excluded.graphql_response_id, completed_at = excluded.completed_at RETURNING *; diff --git a/packages/db/pkg/sqlc/queries/graphql.sql b/packages/db/pkg/sqlc/queries/graphql.sql new file mode 100644 index 000000000..c1bd3a838 --- /dev/null +++ b/packages/db/pkg/sqlc/queries/graphql.sql @@ -0,0 +1,431 @@ +-- +-- GraphQL Core Queries +-- + +-- name: GetGraphQL :one +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE id = ? LIMIT 1; + +-- name: GetGraphQLsByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? +ORDER BY updated_at DESC; + +-- name: GetGraphQLWorkspaceID :one +SELECT workspace_id +FROM graphql +WHERE id = ? 
+LIMIT 1; + +-- name: CreateGraphQL :exec +INSERT INTO graphql ( + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQL :exec +UPDATE graphql +SET + name = ?, + url = ?, + query = ?, + variables = ?, + description = ?, + last_run_at = COALESCE(?, last_run_at), + updated_at = unixepoch() +WHERE id = ?; + +-- name: UpdateGraphQLDelta :exec +UPDATE graphql +SET + delta_name = ?, + delta_url = ?, + delta_query = ?, + delta_variables = ?, + delta_description = ?, + updated_at = unixepoch() +WHERE id = ?; + +-- name: DeleteGraphQL :exec +DELETE FROM graphql +WHERE id = ?; + +-- +-- GraphQL Header Queries +-- + +-- name: GetGraphQLHeaders :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE graphql_id = ? 
+ORDER BY display_order; + +-- name: GetGraphQLHeadersByIDs :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE id IN (sqlc.slice('ids')); + +-- name: CreateGraphQLHeader :exec +INSERT INTO graphql_header ( + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQLHeader :exec +UPDATE graphql_header +SET + header_key = ?, + header_value = ?, + description = ?, + enabled = ?, + display_order = ?, + updated_at = unixepoch() +WHERE id = ?; + +-- name: DeleteGraphQLHeader :exec +DELETE FROM graphql_header +WHERE id = ?; + +-- +-- GraphQL Response Queries +-- + +-- name: GetGraphQLResponse :one +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE id = ? LIMIT 1; + +-- name: GetGraphQLResponsesByGraphQLID :many +SELECT + id, graphql_id, status, body, time, duration, size, created_at +FROM graphql_response +WHERE graphql_id = ? +ORDER BY time DESC; + +-- name: GetGraphQLResponsesByWorkspaceID :many +SELECT + gr.id, gr.graphql_id, gr.status, gr.body, gr.time, + gr.duration, gr.size, gr.created_at +FROM graphql_response gr +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? 
+ORDER BY gr.time DESC; + +-- name: CreateGraphQLResponse :exec +INSERT INTO graphql_response ( + id, graphql_id, status, body, time, duration, size, created_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?); + +-- name: DeleteGraphQLResponse :exec +DELETE FROM graphql_response WHERE id = ?; + +-- +-- GraphQL Response Header Queries +-- + +-- name: GetGraphQLResponseHeadersByResponseID :many +SELECT + id, response_id, key, value, created_at +FROM graphql_response_header +WHERE response_id = ? +ORDER BY key; + +-- name: GetGraphQLResponseHeadersByWorkspaceID :many +SELECT + grh.id, grh.response_id, grh.key, grh.value, grh.created_at +FROM graphql_response_header grh +INNER JOIN graphql_response gr ON grh.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ? +ORDER BY gr.time DESC, grh.key; + +-- name: CreateGraphQLResponseHeader :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES (?, ?, ?, ?, ?); + +-- name: CreateGraphQLResponseHeaderBulk :exec +INSERT INTO graphql_response_header ( + id, response_id, key, value, created_at +) +VALUES + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?), + (?, ?, ?, ?, ?); + +-- name: DeleteGraphQLResponseHeader :exec +DELETE FROM graphql_response_header WHERE id = ?; + +-- +-- GraphQL Delta Queries +-- + +-- name: GetGraphQLDeltasByWorkspaceID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE workspace_id = ? 
AND is_delta = TRUE +ORDER BY updated_at DESC; + +-- name: GetGraphQLDeltasByParentID :many +SELECT + id, workspace_id, folder_id, name, url, query, variables, + description, last_run_at, created_at, updated_at, + parent_graphql_id, is_delta, is_snapshot, + delta_name, delta_url, delta_query, delta_variables, delta_description +FROM graphql +WHERE parent_graphql_id = ? AND is_delta = TRUE +ORDER BY updated_at DESC; + +-- +-- GraphQL Header Delta Queries +-- + +-- name: GetGraphQLHeaderDeltasByWorkspaceID :many +SELECT + h.id, h.graphql_id, h.header_key, h.header_value, h.description, + h.enabled, h.display_order, h.created_at, h.updated_at, + h.parent_graphql_header_id, h.is_delta, + h.delta_header_key, h.delta_header_value, h.delta_description, h.delta_enabled, h.delta_display_order +FROM graphql_header h +JOIN graphql g ON h.graphql_id = g.id +WHERE g.workspace_id = ? AND h.is_delta = TRUE +ORDER BY h.updated_at DESC; + +-- name: GetGraphQLHeaderDeltasByParentID :many +SELECT + id, graphql_id, header_key, header_value, description, + enabled, display_order, created_at, updated_at, + parent_graphql_header_id, is_delta, + delta_header_key, delta_header_value, delta_description, delta_enabled, delta_display_order +FROM graphql_header +WHERE parent_graphql_header_id = ? AND is_delta = TRUE +ORDER BY display_order; + +-- +-- GraphQL Assert Queries +-- + +-- name: GetGraphQLAssert :one +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id = ? +LIMIT 1; + +-- name: GetGraphQLAssertsByGraphQLID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE graphql_id = ? 
+ORDER BY display_order; + +-- name: GetGraphQLAssertsByIDs :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE id IN (sqlc.slice('ids')); + +-- name: CreateGraphQLAssert :exec +INSERT INTO graphql_assert ( + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: UpdateGraphQLAssert :exec +UPDATE graphql_assert +SET + value = ?, + enabled = ?, + description = ?, + display_order = ?, + updated_at = ? +WHERE id = ?; + +-- name: UpdateGraphQLAssertDelta :exec +UPDATE graphql_assert +SET + delta_value = ?, + delta_enabled = ?, + delta_description = ?, + delta_display_order = ?, + updated_at = ? +WHERE id = ?; + +-- name: DeleteGraphQLAssert :exec +DELETE FROM graphql_assert +WHERE id = ?; + +-- name: GetGraphQLAssertDeltasByWorkspaceID :many +SELECT + ga.id, + ga.graphql_id, + ga.value, + ga.enabled, + ga.description, + ga.display_order, + ga.parent_graphql_assert_id, + ga.is_delta, + ga.delta_value, + ga.delta_enabled, + ga.delta_description, + ga.delta_display_order, + ga.created_at, + ga.updated_at +FROM graphql_assert ga +INNER JOIN graphql g ON ga.graphql_id = g.id +WHERE g.workspace_id = ? AND ga.is_delta = TRUE +ORDER BY ga.display_order; + +-- name: GetGraphQLAssertDeltasByParentID :many +SELECT + id, + graphql_id, + value, + enabled, + description, + display_order, + parent_graphql_assert_id, + is_delta, + delta_value, + delta_enabled, + delta_description, + delta_display_order, + created_at, + updated_at +FROM graphql_assert +WHERE parent_graphql_assert_id = ? 
AND is_delta = TRUE +ORDER BY display_order; + +-- +-- GraphQL Version Queries +-- + +-- name: CreateGraphQLVersion :exec +INSERT INTO graphql_version ( + id, graphql_id, version_name, version_description, is_active, created_at, created_by +) +VALUES (?, ?, ?, ?, ?, ?, ?); + +-- name: GetGraphQLVersionsByGraphQLID :many +SELECT id, graphql_id, version_name, version_description, is_active, created_at, created_by +FROM graphql_version +WHERE graphql_id = ? +ORDER BY created_at DESC; + +-- +-- GraphQL Response Assert Queries +-- + +-- name: CreateGraphQLResponseAssert :exec +INSERT INTO graphql_response_assert ( + id, response_id, value, success, created_at +) +VALUES (?, ?, ?, ?, ?); + +-- name: GetGraphQLResponseAssertsByResponseID :many +SELECT id, response_id, value, success, created_at +FROM graphql_response_assert +WHERE response_id = ? +ORDER BY created_at; + +-- name: GetGraphQLResponseAssertsByWorkspaceID :many +SELECT + gra.id, + gra.response_id, + gra.value, + gra.success, + gra.created_at +FROM graphql_response_assert gra +INNER JOIN graphql_response gr ON gra.response_id = gr.id +INNER JOIN graphql g ON gr.graphql_id = g.id +WHERE g.workspace_id = ?; diff --git a/packages/db/pkg/sqlc/schema/03_files.sql b/packages/db/pkg/sqlc/schema/03_files.sql index 532473d45..21f4f8d75 100644 --- a/packages/db/pkg/sqlc/schema/03_files.sql +++ b/packages/db/pkg/sqlc/schema/03_files.sql @@ -16,13 +16,14 @@ CREATE TABLE files ( path_hash TEXT, updated_at BIGINT NOT NULL DEFAULT (unixepoch()), CHECK (length (id) == 16), - CHECK (content_kind IN (0, 1, 2, 3, 4)), -- 0 = folder, 1 = http, 2 = flow, 3 = http_delta, 4 = credential + CHECK (content_kind IN (0, 1, 2, 3, 4, 5)), -- 0 = folder, 1 = http, 2 = http_delta, 3 = flow, 4 = credential, 5 = graphql CHECK ( (content_kind = 0 AND content_id IS NOT NULL) OR (content_kind = 1 AND content_id IS NOT NULL) OR (content_kind = 2 AND content_id IS NOT NULL) OR (content_kind = 3 AND content_id IS NOT NULL) OR (content_kind = 4 AND 
content_id IS NOT NULL) OR + (content_kind = 5 AND content_id IS NOT NULL) OR (content_id IS NULL) ), FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, diff --git a/packages/db/pkg/sqlc/schema/05_flow.sql b/packages/db/pkg/sqlc/schema/05_flow.sql index 54c90dcda..6d7729c6c 100644 --- a/packages/db/pkg/sqlc/schema/05_flow.sql +++ b/packages/db/pkg/sqlc/schema/05_flow.sql @@ -84,6 +84,14 @@ CREATE TABLE flow_node_http ( ); +CREATE TABLE flow_node_graphql ( + flow_node_id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + delta_graphql_id BLOB, + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE, + FOREIGN KEY (delta_graphql_id) REFERENCES graphql (id) ON DELETE SET NULL +); + CREATE TABLE flow_node_condition ( flow_node_id BLOB NOT NULL PRIMARY KEY, expression TEXT NOT NULL @@ -123,8 +131,10 @@ CREATE TABLE node_execution ( output_data_compress_type INT8 NOT NULL DEFAULT 0, -- Add new fields http_response_id BLOB, -- Response ID for HTTP request nodes (NULL for non-request nodes) + graphql_response_id BLOB, -- Response ID for GraphQL request nodes completed_at BIGINT, -- Unix timestamp in milliseconds - FOREIGN KEY (http_response_id) REFERENCES http_response (id) ON DELETE SET NULL + FOREIGN KEY (http_response_id) REFERENCES http_response (id) ON DELETE SET NULL, + FOREIGN KEY (graphql_response_id) REFERENCES graphql_response (id) ON DELETE SET NULL ); CREATE INDEX node_execution_idx1 ON node_execution (node_id); diff --git a/packages/db/pkg/sqlc/schema/08_graphql.sql b/packages/db/pkg/sqlc/schema/08_graphql.sql new file mode 100644 index 000000000..cad1f8ce9 --- /dev/null +++ b/packages/db/pkg/sqlc/schema/08_graphql.sql @@ -0,0 +1,130 @@ +/* + * + * GRAPHQL SYSTEM + * GraphQL request support - simpler than HTTP (no delta system) + * + */ + +-- Core GraphQL request table +CREATE TABLE graphql ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + folder_id BLOB, + name TEXT NOT NULL, + url TEXT NOT NULL, + 
query TEXT NOT NULL DEFAULT '', + variables TEXT NOT NULL DEFAULT '', + description TEXT NOT NULL DEFAULT '', + last_run_at BIGINT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (folder_id) REFERENCES files (id) ON DELETE SET NULL +); + +CREATE INDEX graphql_workspace_idx ON graphql (workspace_id); +CREATE INDEX graphql_folder_idx ON graphql (folder_id) WHERE folder_id IS NOT NULL; + +-- GraphQL versions (snapshots of requests at a point in time) +CREATE TABLE graphql_version ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + version_name TEXT NOT NULL, + version_description TEXT NOT NULL DEFAULT '', + is_active BOOLEAN NOT NULL DEFAULT FALSE, + + -- Metadata + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + created_by BLOB, -- User ID who created this version + + -- Foreign keys + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE, + FOREIGN KEY (created_by) REFERENCES users (id) ON DELETE SET NULL, + + -- Constraints + CHECK (version_name != '') +); + +CREATE INDEX graphql_version_graphql_idx ON graphql_version (graphql_id); +CREATE INDEX graphql_version_active_idx ON graphql_version (is_active) WHERE is_active = TRUE; +CREATE INDEX graphql_version_created_by_idx ON graphql_version (created_by); + +-- GraphQL request headers +CREATE TABLE graphql_header ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + header_key TEXT NOT NULL, + header_value TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + enabled BOOLEAN NOT NULL DEFAULT TRUE, + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_header_graphql_idx ON graphql_header (graphql_id); +CREATE INDEX graphql_header_order_idx 
ON graphql_header (graphql_id, display_order); + +-- GraphQL request assertions +CREATE TABLE graphql_assert ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + value TEXT NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT TRUE, + description TEXT NOT NULL DEFAULT '', + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_assert_graphql_idx ON graphql_assert (graphql_id); +CREATE INDEX graphql_assert_order_idx ON graphql_assert (graphql_id, display_order); + +-- GraphQL response (read-only) +CREATE TABLE graphql_response ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + status INT32 NOT NULL, + body BLOB, + time DATETIME NOT NULL, + duration INT32 NOT NULL, + size INT32 NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_response_graphql_idx ON graphql_response (graphql_id); +CREATE INDEX graphql_response_time_idx ON graphql_response (graphql_id, time DESC); + +-- GraphQL response headers (read-only) +CREATE TABLE graphql_response_header ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + key TEXT NOT NULL, + value TEXT NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_response_header_response_idx ON graphql_response_header (response_id); + +-- GraphQL response assertions (read-only) +CREATE TABLE graphql_response_assert ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + value TEXT NOT NULL, + success BOOLEAN NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE +); + +CREATE INDEX graphql_response_assert_response_idx ON 
graphql_response_assert (response_id); +CREATE INDEX graphql_response_assert_success_idx ON graphql_response_assert (response_id, success); diff --git a/packages/db/pkg/sqlc/schema/09_graphql_delta.sql b/packages/db/pkg/sqlc/schema/09_graphql_delta.sql new file mode 100644 index 000000000..e9899657f --- /dev/null +++ b/packages/db/pkg/sqlc/schema/09_graphql_delta.sql @@ -0,0 +1,56 @@ +/* + * + * GRAPHQL DELTA SYSTEM + * Adds delta/variant support to GraphQL tables for flow node overrides + * + */ + +-- Add delta system fields to graphql table +ALTER TABLE graphql ADD COLUMN parent_graphql_id BLOB DEFAULT NULL; +ALTER TABLE graphql ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; +ALTER TABLE graphql ADD COLUMN is_snapshot BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql table +ALTER TABLE graphql ADD COLUMN delta_name TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_url TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_query TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_variables TEXT NULL; +ALTER TABLE graphql ADD COLUMN delta_description TEXT NULL; + +-- Add foreign key for parent relationship (SQLite requires recreating the table) +-- Since we can't add FK constraints to existing tables in SQLite, we'll handle this +-- at the application level for now and add it in the next major migration + +-- Add indexes for delta resolution and performance +CREATE INDEX graphql_parent_delta_idx ON graphql (parent_graphql_id, is_delta); +CREATE INDEX graphql_delta_resolution_idx ON graphql (parent_graphql_id, is_delta, updated_at DESC); +CREATE INDEX graphql_active_streaming_idx ON graphql (workspace_id, updated_at DESC) WHERE is_delta = FALSE; + +-- Add delta system fields to graphql_header table +ALTER TABLE graphql_header ADD COLUMN parent_graphql_header_id BLOB DEFAULT NULL; +ALTER TABLE graphql_header ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql_header table +ALTER TABLE graphql_header ADD 
COLUMN delta_header_key TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_header_value TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_description TEXT NULL; +ALTER TABLE graphql_header ADD COLUMN delta_enabled BOOLEAN NULL; +ALTER TABLE graphql_header ADD COLUMN delta_display_order REAL NULL; + +-- Add indexes for graphql_header delta support +CREATE INDEX graphql_header_parent_delta_idx ON graphql_header (parent_graphql_header_id, is_delta); +CREATE INDEX graphql_header_delta_streaming_idx ON graphql_header (parent_graphql_header_id, is_delta, updated_at DESC); + +-- Add delta system fields to graphql_assert table +ALTER TABLE graphql_assert ADD COLUMN parent_graphql_assert_id BLOB DEFAULT NULL; +ALTER TABLE graphql_assert ADD COLUMN is_delta BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add delta override fields to graphql_assert table +ALTER TABLE graphql_assert ADD COLUMN delta_value TEXT NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_enabled BOOLEAN NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_description TEXT NULL; +ALTER TABLE graphql_assert ADD COLUMN delta_display_order REAL NULL; + +-- Add indexes for graphql_assert delta support +CREATE INDEX graphql_assert_parent_delta_idx ON graphql_assert (parent_graphql_assert_id, is_delta); +CREATE INDEX graphql_assert_delta_streaming_idx ON graphql_assert (parent_graphql_assert_id, is_delta, updated_at DESC); diff --git a/packages/db/pkg/sqlc/sqlc.yaml b/packages/db/pkg/sqlc/sqlc.yaml index dd1533dc2..2402ae3ae 100644 --- a/packages/db/pkg/sqlc/sqlc.yaml +++ b/packages/db/pkg/sqlc/sqlc.yaml @@ -274,6 +274,19 @@ sql: import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' package: 'idwrap' type: 'IDWrap' + ## flow_node_graphql + ### flow_node_id + - column: 'flow_node_graphql.flow_node_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + ### graphql_id + - column: 'flow_node_graphql.graphql_id' + go_type: + 
import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' ## flow_node_condition ### flow_node_id - column: 'flow_node_condition.flow_node_id' @@ -442,6 +455,13 @@ sql: package: 'idwrap' type: 'IDWrap' pointer: true + ### graphql_response_id + - column: 'node_execution.graphql_response_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + pointer: true ## files ### id - column: 'files.id' @@ -853,3 +873,62 @@ sql: import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' package: 'idwrap' type: 'IDWrap' + ## GraphQL system + ### graphql table + - column: 'graphql.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql.workspace_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql.folder_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + pointer: true + - column: 'graphql.created_at' + go_type: 'int64' + - column: 'graphql.updated_at' + go_type: 'int64' + ### graphql_header table + - column: 'graphql_header.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_header.graphql_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_header.created_at' + go_type: 'int64' + - column: 'graphql_header.updated_at' + go_type: 'int64' + ### graphql_response table + - column: 'graphql_response.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_response.graphql_id' + go_type: + import: 
'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + ### graphql_response_header table + - column: 'graphql_response_header.id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' + - column: 'graphql_response_header.response_id' + go_type: + import: 'github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap' + package: 'idwrap' + type: 'IDWrap' diff --git a/packages/server/cmd/serverrun/serverrun.go b/packages/server/cmd/serverrun/serverrun.go index c69360329..ae80bae5f 100644 --- a/packages/server/cmd/serverrun/serverrun.go +++ b/packages/server/cmd/serverrun/serverrun.go @@ -28,6 +28,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rexportv2" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rflowv2" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhealth" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rimportv2" @@ -38,6 +39,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/credvault" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream/memory" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" @@ -47,6 +49,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/suser" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" @@ -54,6 +57,7 @@ import ( envapiv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/environment/v1" filesystemv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/file_system/v1" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" httpv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/http/v1" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/private/node_js_executor/v1/node_js_executorv1connect" apiv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/workspace/v1" @@ -188,6 +192,14 @@ func Run() error { flowNodeAIService := sflow.NewNodeAIService(queries) flowNodeAiProviderService := sflow.NewNodeAiProviderService(queries) flowNodeMemoryService := sflow.NewNodeMemoryService(queries) + flowNodeGraphQLService := sflow.NewNodeGraphQLService(queries) + + // GraphQL + graphqlService := sgraphql.New(queries, logger) + graphqlReader := graphqlService.Reader() + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResponseService := sgraphql.NewGraphQLResponseService(queries) nodeExecutionService := sflow.NewNodeExecutionService(queries) nodeExecutionReader := sflow.NewNodeExecutionReader(currentDB) @@ -300,6 +312,12 @@ func Run() error { httpAssertService, ) + graphqlResolver := gqlresolver.NewStandardResolver( + graphqlReader, + &graphqlHeaderService, + &graphqlAssertService, + ) + httpSrv := 
rhttp.New(rhttp.HttpServiceRPCDeps{ DB: currentDB, Readers: rhttp.HttpServiceRPCReaders{ @@ -447,15 +465,19 @@ func Run() error { NodeJs: &flowNodeNodeJsService, NodeAI: &flowNodeAIService, NodeAiProvider: &flowNodeAiProviderService, - NodeMemory: &flowNodeMemoryService, - NodeExecution: &nodeExecutionService, + NodeMemory: &flowNodeMemoryService, + NodeGraphQL: &flowNodeGraphQLService, + NodeExecution: &nodeExecutionService, FlowVariable: &flowVariableService, Env: &environmentService, Var: &variableService, Http: &httpService, HttpBodyRaw: httpBodyRawService, - HttpResponse: httpResponseService, - File: fileService, + HttpResponse: httpResponseService, + GraphQLResponse: graphqlResponseService, + GraphQL: &graphqlService, + GraphQLHeader: &graphqlHeaderService, + File: fileService, Importer: workspaceImporter, Credential: credentialService, }, @@ -472,16 +494,22 @@ func Run() error { Js: streamers.Js, Ai: streamers.Ai, AiProvider: streamers.AiProvider, - Memory: streamers.Memory, - Execution: streamers.Execution, + Memory: streamers.Memory, + NodeGraphQL: streamers.NodeGraphQL, + GraphQL: streamers.GraphQL, + Execution: streamers.Execution, HttpResponse: streamers.HttpResponse, HttpResponseHeader: streamers.HttpResponseHeader, - HttpResponseAssert: streamers.HttpResponseAssert, - Log: streamers.Log, - File: streamers.File, + HttpResponseAssert: streamers.HttpResponseAssert, + GraphQLResponse: streamers.GraphQLResponse, + GraphQLResponseHeader: streamers.GraphQLResponseHeader, + GraphQLResponseAssert: streamers.GraphQLResponseAssert, + Log: streamers.Log, + File: streamers.File, }, - Resolver: requestResolver, - Logger: logger, + Resolver: requestResolver, + GraphQLResolver: graphqlResolver, + Logger: logger, JsClient: jsClient, }) newServiceManager.addService(rflowv2.CreateService(flowSrvV2, optionsAll)) @@ -534,21 +562,58 @@ func Run() error { }) newServiceManager.addService(rcredential.CreateService(credentialSrv, optionsAll)) + // GraphQL Service + 
graphqlStreamers := &rgraphql.GraphQLStreamers{ + GraphQL: streamers.GraphQL, + GraphQLHeader: streamers.GraphQLHeader, + GraphQLAssert: streamers.GraphQLAssert, + GraphQLResponse: streamers.GraphQLResponse, + GraphQLResponseHeader: streamers.GraphQLResponseHeader, + GraphQLResponseAssert: streamers.GraphQLResponseAssert, + GraphQLVersion: streamers.GraphQLVersion, + File: streamers.File, + } + + graphqlSrv := rgraphql.New(rgraphql.GraphQLServiceRPCDeps{ + DB: currentDB, + Services: rgraphql.GraphQLServiceRPCServices{ + GraphQL: graphqlService, + Header: graphqlHeaderService, + GraphQLAssert: graphqlAssertService, + Response: graphqlResponseService, + User: userService, + Workspace: workspaceService, + WorkspaceUser: workspaceUserService, + Env: environmentService, + Variable: variableService, + File: fileService, + }, + Readers: rgraphql.GraphQLServiceRPCReaders{ + GraphQL: graphqlReader, + User: userReader, + Workspace: workspaceReader, + }, + Resolver: graphqlResolver, + Streamers: graphqlStreamers, + }) + newServiceManager.addService(rgraphql.CreateService(graphqlSrv, optionsAll)) + // Reference Service refServiceRPC := rreference.NewReferenceServiceRPC(rreference.ReferenceServiceRPCDeps{ DB: currentDB, Readers: rreference.ReferenceServiceRPCReaders{ - User: userReader, - Workspace: workspaceReader, - Env: envReader, - Variable: varReader, - Flow: flowReader, - Node: nodeReader, - NodeRequest: flowNodeRequestReader, - FlowVariable: flowVariableReader, - FlowEdge: flowEdgeReader, - NodeExecution: nodeExecutionReader, - HttpResponse: httpResponseReader, + User: userReader, + Workspace: workspaceReader, + Env: envReader, + Variable: varReader, + Flow: flowReader, + Node: nodeReader, + NodeRequest: flowNodeRequestReader, + FlowVariable: flowVariableReader, + FlowEdge: flowEdgeReader, + NodeExecution: nodeExecutionReader, + HttpResponse: httpResponseReader, + GraphQLResponse: &graphqlResponseService, }, }) 
newServiceManager.addService(rreference.CreateService(refServiceRPC, optionsAll)) @@ -695,8 +760,16 @@ type streamers struct { Ai eventstream.SyncStreamer[rflowv2.AiTopic, rflowv2.AiEvent] AiProvider eventstream.SyncStreamer[rflowv2.AiProviderTopic, rflowv2.AiProviderEvent] Memory eventstream.SyncStreamer[rflowv2.MemoryTopic, rflowv2.MemoryEvent] + NodeGraphQL eventstream.SyncStreamer[rflowv2.NodeGraphQLTopic, rflowv2.NodeGraphQLEvent] Execution eventstream.SyncStreamer[rflowv2.ExecutionTopic, rflowv2.ExecutionEvent] File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] + GraphQL eventstream.SyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent] + GraphQLHeader eventstream.SyncStreamer[rgraphql.GraphQLHeaderTopic, rgraphql.GraphQLHeaderEvent] + GraphQLAssert eventstream.SyncStreamer[rgraphql.GraphQLAssertTopic, rgraphql.GraphQLAssertEvent] + GraphQLResponse eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + GraphQLVersion eventstream.SyncStreamer[rgraphql.GraphQLVersionTopic, rgraphql.GraphQLVersionEvent] Credential eventstream.SyncStreamer[rcredential.CredentialTopic, rcredential.CredentialEvent] CredentialOpenAi eventstream.SyncStreamer[rcredential.CredentialOpenAiTopic, rcredential.CredentialOpenAiEvent] CredentialGemini eventstream.SyncStreamer[rcredential.CredentialGeminiTopic, rcredential.CredentialGeminiEvent] @@ -732,9 +805,17 @@ func newStreamers() *streamers { Ai: memory.NewInMemorySyncStreamer[rflowv2.AiTopic, rflowv2.AiEvent](), AiProvider: memory.NewInMemorySyncStreamer[rflowv2.AiProviderTopic, rflowv2.AiProviderEvent](), Memory: memory.NewInMemorySyncStreamer[rflowv2.MemoryTopic, rflowv2.MemoryEvent](), - Execution: 
memory.NewInMemorySyncStreamer[rflowv2.ExecutionTopic, rflowv2.ExecutionEvent](), - File: memory.NewInMemorySyncStreamer[rfile.FileTopic, rfile.FileEvent](), - Credential: memory.NewInMemorySyncStreamer[rcredential.CredentialTopic, rcredential.CredentialEvent](), + NodeGraphQL: memory.NewInMemorySyncStreamer[rflowv2.NodeGraphQLTopic, rflowv2.NodeGraphQLEvent](), + Execution: memory.NewInMemorySyncStreamer[rflowv2.ExecutionTopic, rflowv2.ExecutionEvent](), + File: memory.NewInMemorySyncStreamer[rfile.FileTopic, rfile.FileEvent](), + GraphQL: memory.NewInMemorySyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent](), + GraphQLHeader: memory.NewInMemorySyncStreamer[rgraphql.GraphQLHeaderTopic, rgraphql.GraphQLHeaderEvent](), + GraphQLAssert: memory.NewInMemorySyncStreamer[rgraphql.GraphQLAssertTopic, rgraphql.GraphQLAssertEvent](), + GraphQLResponse: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent](), + GraphQLResponseHeader: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent](), + GraphQLResponseAssert: memory.NewInMemorySyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent](), + GraphQLVersion: memory.NewInMemorySyncStreamer[rgraphql.GraphQLVersionTopic, rgraphql.GraphQLVersionEvent](), + Credential: memory.NewInMemorySyncStreamer[rcredential.CredentialTopic, rcredential.CredentialEvent](), CredentialOpenAi: memory.NewInMemorySyncStreamer[rcredential.CredentialOpenAiTopic, rcredential.CredentialOpenAiEvent](), CredentialGemini: memory.NewInMemorySyncStreamer[rcredential.CredentialGeminiTopic, rcredential.CredentialGeminiEvent](), CredentialAnthropic: memory.NewInMemorySyncStreamer[rcredential.CredentialAnthropicTopic, rcredential.CredentialAnthropicEvent](), @@ -769,8 +850,16 @@ func (s *streamers) shutdown() { s.Ai.Shutdown() s.AiProvider.Shutdown() s.Memory.Shutdown() + s.NodeGraphQL.Shutdown() s.Execution.Shutdown() 
s.File.Shutdown() + s.GraphQL.Shutdown() + s.GraphQLHeader.Shutdown() + s.GraphQLAssert.Shutdown() + s.GraphQLResponse.Shutdown() + s.GraphQLResponseHeader.Shutdown() + s.GraphQLResponseAssert.Shutdown() + s.GraphQLVersion.Shutdown() s.Credential.Shutdown() s.CredentialOpenAi.Shutdown() s.CredentialGemini.Shutdown() @@ -995,4 +1084,30 @@ func registerCascadeHandlers(registry *streamregistry.Registry, httpStreamers *r }) }) } + + // GraphQL entity + if streamers.GraphQL != nil { + registry.Register(mutation.EntityGraphQL, func(evt mutation.Event) { + if evt.Op != mutation.OpDelete { + return + } + streamers.GraphQL.Publish(rgraphql.GraphQLTopic{WorkspaceID: evt.WorkspaceID}, rgraphql.GraphQLEvent{ + Type: "delete", + GraphQL: &graphqlv1.GraphQL{GraphqlId: evt.ID.Bytes()}, + }) + }) + } + + // GraphQL Header entity + if streamers.GraphQLHeader != nil { + registry.Register(mutation.EntityGraphQLHeader, func(evt mutation.Event) { + if evt.Op != mutation.OpDelete { + return + } + streamers.GraphQLHeader.Publish(rgraphql.GraphQLHeaderTopic{WorkspaceID: evt.WorkspaceID}, rgraphql.GraphQLHeaderEvent{ + Type: "delete", + GraphQLHeader: &graphqlv1.GraphQLHeader{GraphqlHeaderId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()}, + }) + }) + } } diff --git a/packages/server/docs/specs/GRAPHQL.md b/packages/server/docs/specs/GRAPHQL.md new file mode 100644 index 000000000..d8c5f5f7b --- /dev/null +++ b/packages/server/docs/specs/GRAPHQL.md @@ -0,0 +1,367 @@ +# GraphQL Specification + +## Overview + +The GraphQL system adds first-class GraphQL request support to DevTools. It enables users to compose GraphQL queries/mutations, execute them against any GraphQL endpoint, introspect schemas for autocompletion and documentation, and view responses -- all following the same architecture patterns as the existing HTTP system. 
+ +## Reference Implementation + +This design is informed by [Bruno](https://github.com/usebruno/bruno)'s GraphQL implementation, adapted to DevTools' TypeScript + Go stack (TypeSpec, Connect RPC, TanStack React DB, CodeMirror 6). + +### What Bruno Does + +- **Query Editor**: CodeMirror with `codemirror-graphql` for syntax highlighting, schema-aware autocompletion, real-time validation, and query formatting via Prettier +- **Variables Editor**: JSON editor for GraphQL variables with prettify support +- **Schema Introspection**: Fetches schema via standard introspection query (`getIntrospectionQuery()` from `graphql` lib), caches result, builds `GraphQLSchema` object via `buildClientSchema()` +- **Documentation Explorer**: Custom component that navigates the `GraphQLSchema` type map with breadcrumb navigation, search, and clickable type references +- **Request Execution**: HTTP POST with `Content-Type: application/json`, body `{ "query": "...", "variables": {...} }` +- **Tabbed UI**: Query (default), Variables, Headers, Auth, Docs tabs + +### What We Include + +- Query editor with schema-aware autocompletion and validation (via `cm6-graphql` for CodeMirror 6) +- Variables editor (JSON) +- Headers (key-value table for manual auth and custom headers) +- Schema introspection and caching in SQLite +- Documentation explorer +- Request execution and response display + +### What We Exclude (For Now) + +- **Scripts/hooks**: Pre/post-request scripts (not needed) +- **Variable extraction**: Already handled automatically by DevTools +- **Auth UI**: Users set auth manually via headers; dedicated auth UI added later +- **Delta system**: Not needed initially; can be added later + +--- + +## Core Concepts + +### 1. Request Definition + +A GraphQL request defines what to send to a GraphQL endpoint. 
+ +- **URL**: The GraphQL endpoint (e.g., `https://api.example.com/graphql`) +- **Query**: The GraphQL query/mutation string +- **Variables**: JSON string of variables to pass with the query +- **Headers**: Key-value pairs with enable/disable toggle (used for auth tokens, custom headers) + +Unlike HTTP requests, GraphQL is always: + +- Method: **POST** +- Content-Type: **application/json** +- Body: `{ "query": "...", "variables": {...} }` + +### 2. Schema Introspection + +GraphQL's self-documenting nature is a key feature: + +1. User clicks "Fetch Schema" in the UI +2. Backend sends the standard introspection query to the endpoint (with user's headers for auth) +3. Backend returns the raw introspection JSON +4. Frontend builds a `GraphQLSchema` object via `buildClientSchema()` from the `graphql` JS library +5. Schema enables: autocompletion in the query editor, validation/linting, and the documentation explorer + +Schema introspection results are stored in SQLite (not localStorage like Bruno) for persistence and consistency. + +### 3. Execution & Response + +When a GraphQL request is "Run": + +1. **Interpolation**: Variables (`{{ varName }}`) are substituted into URL, query, variables, and header values +2. **Construction**: Build JSON body `{ "query": "...", "variables": {...} }` +3. **Transmission**: HTTP POST via the existing Go HTTP client (`httpclient` package) +4. **Response**: Status, headers, body (JSON), timing, and size are captured +5. **Persistence**: Response stored in `graphql_response` table, linked to the GraphQL request + +--- + +## Architecture + +### Design Decision: Separate Entity Type + +GraphQL is a **new entity type** rather than an extension of HTTP because: + +1. HTTP's `BodyKind` enum (`FormData`/`UrlEncoded`/`Raw`) doesn't conceptually fit GraphQL's `query + variables` model +2. GraphQL requires schema storage -- an entirely new concern that doesn't belong on HTTP +3. 
Execution is fundamentally simpler (always POST, always JSON, fixed body structure) +4. Follows the existing pattern where each protocol is its own entity + +GraphQL does **not** use the delta system initially to keep scope manageable. + +### File System Integration + +A new `GraphQL` value is added to the `FileKind` enum in `file-system.tsp`, allowing GraphQL requests to appear in the workspace sidebar tree alongside HTTP requests and flows. + +--- + +## Backend + +### API Layer (`packages/server/internal/api/rgraphql`) + +- **Role**: Entry point for Connect RPC +- **Responsibilities**: + - Validates incoming Protobuf messages + - Orchestrates transactions (Fetch-Check-Act pattern) + - Calls the Service Layer + - Publishes events to `eventstream` for real-time UI updates +- **Key RPC Operations**: + - `GraphQLRun`: Execute a GraphQL request + - `GraphQLIntrospect`: Fetch schema via introspection query + - `GraphQLDuplicate`: Clone a GraphQL request + - Standard CRUD for GraphQL entity and headers + - Streaming sync for TanStack DB real-time collections +- **Files**: `rgraphql.go` (service struct, streamers), `rgraphql_exec.go` (execution), `rgraphql_crud.go` (management), `rgraphql_sync.go` (streaming) + +### Service Layer (`packages/server/pkg/service/sgraphql`) + +- **Role**: Business logic and data access adapter +- **Pattern**: Reader (non-blocking, `*sql.DB`) + Writer (transactional, `*sql.Tx`) +- **Responsibilities**: + - Converts between Internal Models (`mgraphql`) and DB Models (`gen`) + - Executes `sqlc` queries + - Handles duplication logic (copying headers) + +### Domain Model (`packages/server/pkg/model/mgraphql`) + +Pure Go structs decoupled from DB and API: + +```go +type GraphQL struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + FolderID *idwrap.IDWrap + Name string + Url string + Query string // GraphQL query/mutation string + Variables string // JSON string of variables + Description string + LastRunAt *int64 + CreatedAt int64 + UpdatedAt 
int64 +} + +type GraphQLHeader struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + Key string + Value string + Description string + Enabled bool + DisplayOrder float32 +} + +type GraphQLResponse struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + Status int32 + Body []byte + Time int64 + Duration int32 + Size int32 +} + +type GraphQLResponseHeader struct { + ID idwrap.IDWrap + ResponseID idwrap.IDWrap + Key string + Value string +} +``` + +### GraphQL Executor (`packages/server/pkg/graphql/executor.go`) + +Analogous to `packages/server/pkg/http/request/request.go` but simpler: + +```go +func PrepareGraphQLRequest(gql mgraphql.GraphQL, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error) +func PrepareIntrospectionRequest(url string, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error) +``` + +Both always produce HTTP POST with `Content-Type: application/json`. The introspection variant uses the well-known introspection query string. + +--- + +## Database Schema + +### Tables + +- **`graphql`**: Core request metadata (name, url, query, variables) +- **`graphql_header`**: Request headers (key, value, enabled, order) +- **`graphql_response`**: Execution results (status, body, duration, size) +- **`graphql_response_header`**: Response headers + +Additional tables: `graphql_assert`, `graphql_version`, and `graphql_response_assert`; delta fields are added separately in `09_graphql_delta.sql`. + +Schema files: `packages/db/pkg/sqlc/schema/08_graphql.sql` and `09_graphql_delta.sql` + +--- + +## Frontend + +### CodeMirror 6 GraphQL Integration + +- **Package**: `cm6-graphql` (official CM6 GraphQL extension from GraphiQL monorepo) +- **Features**: Syntax highlighting, schema-aware autocompletion, linting/validation +- **Location**: `packages/client/src/features/graphql-editor/index.tsx` +- **Hook**: `useGraphQLEditorExtensions(schema?: GraphQLSchema)` returns CM6 extensions + +Also adds `'graphql'` to the prettier language support in `packages/client/src/features/expression/prettier.tsx`.
+ +### Page Components (`packages/client/src/pages/graphql/`) + +Following the pattern of `packages/client/src/pages/http/`: + +| Component | Description | +| ------------------------------ | ------------------------------------------------------ | +| `page.tsx` | Main page with resizable request/response split panels | +| `request/top-bar.tsx` | URL input, Send button, Fetch Schema button | +| `request/panel.tsx` | Tabbed panel: Query, Variables, Headers, Docs | +| `request/query-editor.tsx` | CodeMirror with `cm6-graphql` extensions | +| `request/variables-editor.tsx` | CodeMirror with JSON language | +| `request/header.tsx` | Headers key-value table | +| `request/doc-explorer.tsx` | Schema documentation browser | +| `response/body.tsx` | Response body viewer (JSON syntax highlighting) | + +### Documentation Explorer + +Custom component (not importing GraphiQL's, which has heavy context dependencies): + +- **Navigation**: Stack-based with breadcrumbs (root -> type -> field) +- **Root view**: Lists Query, Mutation, Subscription root types +- **Type view**: Fields with types, arguments, descriptions +- **Search**: Debounced filter across type/field names +- **Type links**: Clickable references that push onto navigation stack +- **Built with**: React Aria components, Tailwind CSS, `graphql` JS library's type introspection APIs + +### Routing + +Route: `/(dashboard)/(workspace)/workspace/$workspaceIdCan/(graphql)/graphql/$graphqlIdCan/` + +Added to `packages/client/src/shared/routes.tsx` and sidebar file tree handler. 
+ +--- + +## TypeSpec Definition + +File: `packages/spec/api/graphql.tsp` + +```typespec +using DevTools; +namespace Api.GraphQL; + +@TanStackDB.collection +model GraphQL { + @primaryKey graphqlId: Id; + name: string; + url: string; + query: string; + variables: string; + lastRunAt?: Protobuf.WellKnown.Timestamp; +} + +@TanStackDB.collection +model GraphQLHeader { + @primaryKey graphqlHeaderId: Id; + @foreignKey graphqlId: Id; + key: string; + value: string; + enabled: boolean; + description: string; + order: float32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponse { + @primaryKey graphqlResponseId: Id; + @foreignKey graphqlId: Id; + status: int32; + body: string; + time: Protobuf.WellKnown.Timestamp; + duration: int32; + size: int32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseHeader { + @primaryKey graphqlResponseHeaderId: Id; + @foreignKey graphqlResponseId: Id; + key: string; + value: string; +} + +model GraphQLRunRequest { + graphqlId: Id; +} +op GraphQLRun(...GraphQLRunRequest): {}; + +model GraphQLDuplicateRequest { + graphqlId: Id; +} +op GraphQLDuplicate(...GraphQLDuplicateRequest): {}; + +model GraphQLIntrospectRequest { + graphqlId: Id; +} +model GraphQLIntrospectResponse { + sdl: string; + introspectionJson: string; +} +op GraphQLIntrospect(...GraphQLIntrospectRequest): GraphQLIntrospectResponse; +``` + +--- + +## Implementation Order + +1. TypeSpec + code generation (`graphql.tsp`, `FileKind.GraphQL`, run `spec:build`) +2. Database schema + sqlc (`08_graphql.sql`, queries, `sqlc.yaml`, run `db:generate`) +3. Go models (`mgraphql/`) +4. Go services (`sgraphql/` - reader, writer, mapper for each entity) +5. Go executor (`pkg/graphql/executor.go`) +6. Go RPC handlers (`rgraphql/` - CRUD, exec, introspect, sync) +7. Server wiring (`server.go` - streamers, services, cascade handlers) +8. Frontend packages (`cm6-graphql`, `graphql` npm deps) +9. 
Frontend components (pages, editor, doc explorer, routes) + +--- + +## Files Changed / Created + +### New Files + +``` +packages/spec/api/graphql.tsp +packages/db/pkg/sqlc/schema/08_graphql.sql +packages/db/pkg/sqlc/queries/graphql.sql +packages/server/pkg/model/mgraphql/mgraphql.go +packages/server/pkg/service/sgraphql/ (sgraphql.go, reader.go, writer.go, mapper.go, header*.go, response*.go) +packages/server/pkg/graphql/executor.go +packages/server/internal/api/rgraphql/ (rgraphql.go, rgraphql_crud.go, rgraphql_crud_header.go, rgraphql_exec.go, rgraphql_converter.go, rgraphql_sync.go) +packages/client/src/features/graphql-editor/index.tsx +packages/client/src/pages/graphql/ (page.tsx, tab.tsx, request/*, response/*, routes/*) +``` + +### Modified Files + +``` +packages/spec/api/main.tsp (add graphql.tsp import) +packages/spec/api/file-system.tsp (add GraphQL to FileKind) +packages/db/pkg/sqlc/sqlc.yaml (add graphql column overrides) +packages/server/cmd/server/server.go (wire services, streamers, cascade) +packages/client/package.json (add cm6-graphql, graphql deps) +packages/client/src/shared/routes.tsx (add GraphQL routes) +packages/client/src/features/expression/prettier.tsx (add graphql language) +``` + +--- + +## Verification + +1. `direnv exec . pnpm nx run spec:build` succeeds +2. `direnv exec . pnpm nx run db:generate` succeeds +3. `direnv exec . pnpm nx run server:dev` starts without errors +4. `direnv exec . pnpm nx run client:dev` builds successfully +5. `direnv exec . task lint` passes +6. `direnv exec . task test` passes +7. 
E2E: Create GraphQL request -> enter endpoint -> write query -> fetch schema -> verify autocompletion -> send request -> verify response display -> browse docs diff --git a/packages/server/internal/api/rfile/rfile.go b/packages/server/internal/api/rfile/rfile.go index 262457828..d25c87e2e 100644 --- a/packages/server/internal/api/rfile/rfile.go +++ b/packages/server/internal/api/rfile/rfile.go @@ -140,6 +140,8 @@ func toAPIFileKind(kind mfile.ContentType) apiv1.FileKind { return apiv1.FileKind_FILE_KIND_FLOW case mfile.ContentTypeCredential: return apiv1.FileKind_FILE_KIND_CREDENTIAL + case mfile.ContentTypeGraphQL: + return apiv1.FileKind_FILE_KIND_GRAPH_Q_L default: return apiv1.FileKind_FILE_KIND_UNSPECIFIED } @@ -158,6 +160,8 @@ func fromAPIFileKind(kind apiv1.FileKind) mfile.ContentType { return mfile.ContentTypeFlow case apiv1.FileKind_FILE_KIND_CREDENTIAL: return mfile.ContentTypeCredential + case apiv1.FileKind_FILE_KIND_GRAPH_Q_L: + return mfile.ContentTypeGraphQL default: return mfile.ContentTypeUnknown } diff --git a/packages/server/internal/api/rflowv2/assertion_race_test.go b/packages/server/internal/api/rflowv2/assertion_race_test.go index 128f04c9d..bf00486bf 100644 --- a/packages/server/internal/api/rflowv2/assertion_race_test.go +++ b/packages/server/internal/api/rflowv2/assertion_race_test.go @@ -20,7 +20,9 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream/memory" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" @@ -82,6 +84,11 @@ func TestFlowRun_AssertionOrder(t *testing.T) { nodeAIService := sflow.NewNodeAIService(queries) nodeAiProviderService := sflow.NewNodeAiProviderService(queries) nodeMemoryService := sflow.NewNodeMemoryService(queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) envService := senv.NewEnvironmentService(queries, logger) varService := senv.NewVariableService(queries, logger) @@ -123,6 +130,7 @@ func TestFlowRun_AssertionOrder(t *testing.T) { NodeAI: &nodeAIService, NodeAiProvider: &nodeAiProviderService, NodeMemory: &nodeMemoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -135,8 +143,9 @@ func TestFlowRun_AssertionOrder(t *testing.T) { Execution: executionStream, HttpResponseAssert: assertStream, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // 4. 
Setup Data diff --git a/packages/server/internal/api/rflowv2/chaos_test.go b/packages/server/internal/api/rflowv2/chaos_test.go index 8941a08f5..56effdf9e 100644 --- a/packages/server/internal/api/rflowv2/chaos_test.go +++ b/packages/server/internal/api/rflowv2/chaos_test.go @@ -28,8 +28,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -82,6 +84,11 @@ func TestChaos_EventOrdering(t *testing.T) { nodeAIService := sflow.NewNodeAIService(queries) nodeAiProviderService := sflow.NewNodeAiProviderService(queries) nodeMemoryService := sflow.NewNodeMemoryService(queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) envService := senv.NewEnvironmentService(queries, logger) varService := senv.NewVariableService(queries, logger) @@ -123,6 +130,7 @@ func TestChaos_EventOrdering(t *testing.T) { NodeAI: &nodeAIService, NodeAiProvider: &nodeAiProviderService, NodeMemory: &nodeMemoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -135,8 +143,9 @@ func TestChaos_EventOrdering(t *testing.T) { Execution: 
executionStream, HttpResponse: responseStream, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // 4. Setup Data diff --git a/packages/server/internal/api/rflowv2/delta_integration_test.go b/packages/server/internal/api/rflowv2/delta_integration_test.go index 0e72b5ca8..6be0364ec 100644 --- a/packages/server/internal/api/rflowv2/delta_integration_test.go +++ b/packages/server/internal/api/rflowv2/delta_integration_test.go @@ -23,8 +23,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -83,6 +85,11 @@ func TestFlowRun_DeltaOverride(t *testing.T) { nodeAIService := sflow.NewNodeAIService(queries) nodeAiProviderService := sflow.NewNodeAiProviderService(queries) nodeMemoryService := sflow.NewNodeMemoryService(queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) // Response services httpResponseService := shttp.NewHttpResponseService(queries) @@ -125,6 +132,7 @@ func TestFlowRun_DeltaOverride(t *testing.T) { NodeAI: &nodeAIService, NodeAiProvider: &nodeAiProviderService, NodeMemory: &nodeMemoryService, + NodeGraphQL: 
&nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -133,8 +141,9 @@ func TestFlowRun_DeltaOverride(t *testing.T) { HttpBodyRaw: shttpBodyRawSvc, HttpResponse: httpResponseService, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // 4. Setup Data diff --git a/packages/server/internal/api/rflowv2/execution_cache_test.go b/packages/server/internal/api/rflowv2/execution_cache_test.go index f67f0f6fb..d26c0983a 100644 --- a/packages/server/internal/api/rflowv2/execution_cache_test.go +++ b/packages/server/internal/api/rflowv2/execution_cache_test.go @@ -26,8 +26,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -76,6 +78,11 @@ func TestExecutionCache(t *testing.T) { nodeAIService := sflow.NewNodeAIService(queries) nodeAiProviderService := sflow.NewNodeAiProviderService(queries) nodeMemoryService := sflow.NewNodeMemoryService(queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) envService := senv.NewEnvironmentService(queries, logger) varService := 
senv.NewVariableService(queries, logger) @@ -117,6 +124,7 @@ func TestExecutionCache(t *testing.T) { NodeAI: &nodeAIService, NodeAiProvider: &nodeAiProviderService, NodeMemory: &nodeMemoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -129,8 +137,9 @@ func TestExecutionCache(t *testing.T) { Execution: executionStream, HttpResponseAssert: assertStream, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // 4. Setup Data diff --git a/packages/server/internal/api/rflowv2/js_e2e_test.go b/packages/server/internal/api/rflowv2/js_e2e_test.go index 87a355deb..689585fe0 100644 --- a/packages/server/internal/api/rflowv2/js_e2e_test.go +++ b/packages/server/internal/api/rflowv2/js_e2e_test.go @@ -25,8 +25,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/private/node_js_executor/v1/node_js_executorv1connect" ) @@ -124,6 +126,11 @@ func TestJSNodeExecution_E2E(t *testing.T) { nodeAIService := sflow.NewNodeAIService(queries) nodeAiProviderService := sflow.NewNodeAiProviderService(queries) nodeMemoryService := sflow.NewNodeMemoryService(queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := 
sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) // Environment and variable services envService := senv.NewEnvironmentService(queries, logger) @@ -163,6 +170,7 @@ func TestJSNodeExecution_E2E(t *testing.T) { NodeAI: &nodeAIService, NodeAiProvider: &nodeAiProviderService, NodeMemory: &nodeMemoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -171,9 +179,10 @@ func TestJSNodeExecution_E2E(t *testing.T) { HttpBodyRaw: shttpBodyRawSvc, HttpResponse: httpResponseService, }, - Resolver: res, - Logger: logger, - JsClient: jsClient, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, + JsClient: jsClient, }) // Setup Data diff --git a/packages/server/internal/api/rflowv2/logging_test.go b/packages/server/internal/api/rflowv2/logging_test.go index b3c9315a3..61e7520be 100644 --- a/packages/server/internal/api/rflowv2/logging_test.go +++ b/packages/server/internal/api/rflowv2/logging_test.go @@ -80,10 +80,14 @@ func TestFlowRun_Logging(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver logger, nil, // LLMProviderFactory ) diff --git a/packages/server/internal/api/rflowv2/rflowv2.go b/packages/server/internal/api/rflowv2/rflowv2.go index 34958e2ee..6cd086888 100644 --- a/packages/server/internal/api/rflowv2/rflowv2.go +++ b/packages/server/internal/api/rflowv2/rflowv2.go @@ -12,10 +12,12 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" + 
"github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rlog" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/flowbuilder" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" @@ -24,6 +26,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" @@ -181,6 +184,12 @@ type nodeJsWithFlow struct { baseNode *mflow.Node } +type nodeGraphQLWithFlow struct { + nodeGraphQL mflow.NodeGraphQL + flowID idwrap.IDWrap + baseNode *mflow.Node +} + // Shared event type strings for all entity types. // Using mutation.Operation.String() values for consistency. 
const ( @@ -269,14 +278,18 @@ type FlowServiceV2Services struct { NodeAI *sflow.NodeAIService NodeAiProvider *sflow.NodeAiProviderService NodeMemory *sflow.NodeMemoryService + NodeGraphQL *sflow.NodeGraphQLService NodeExecution *sflow.NodeExecutionService FlowVariable *sflow.FlowVariableService Env *senv.EnvironmentService Var *senv.VariableService Http *shttp.HTTPService HttpBodyRaw *shttp.HttpBodyRawService - HttpResponse shttp.HttpResponseService - File *sfile.FileService + HttpResponse shttp.HttpResponseService + GraphQLResponse sgraphql.GraphQLResponseService + GraphQL *sgraphql.GraphQLService + GraphQLHeader *sgraphql.GraphQLHeaderService + File *sfile.FileService Importer WorkspaceImporter Credential scredential.CredentialService } @@ -318,6 +331,9 @@ func (s *FlowServiceV2Services) Validate() error { if s.NodeMemory == nil { return fmt.Errorf("node memory service is required") } + if s.NodeGraphQL == nil { + return fmt.Errorf("node graphql service is required") + } if s.NodeExecution == nil { return fmt.Errorf("node execution service is required") } @@ -352,23 +368,29 @@ type FlowServiceV2Streamers struct { Ai eventstream.SyncStreamer[AiTopic, AiEvent] AiProvider eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] Memory eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + NodeGraphQL eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] + GraphQL eventstream.SyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent] Execution eventstream.SyncStreamer[ExecutionTopic, ExecutionEvent] - Http eventstream.SyncStreamer[rhttp.HttpTopic, rhttp.HttpEvent] - HttpResponse eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] - HttpResponseHeader eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] - HttpResponseAssert eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] - Log eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] - File 
eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] + Http eventstream.SyncStreamer[rhttp.HttpTopic, rhttp.HttpEvent] + HttpResponse eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] + HttpResponseHeader eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] + HttpResponseAssert eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] + GraphQLResponse eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + Log eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] + File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] } type FlowServiceV2Deps struct { - DB *sql.DB - Readers FlowServiceV2Readers - Services FlowServiceV2Services - Streamers FlowServiceV2Streamers - Resolver resolver.RequestResolver - Logger *slog.Logger - JsClient node_js_executorv1connect.NodeJsExecutorServiceClient + DB *sql.DB + Readers FlowServiceV2Readers + Services FlowServiceV2Services + Streamers FlowServiceV2Streamers + Resolver resolver.RequestResolver + GraphQLResolver gqlresolver.GraphQLResolver + Logger *slog.Logger + JsClient node_js_executorv1connect.NodeJsExecutorServiceClient } func (d *FlowServiceV2Deps) Validate() error { @@ -384,6 +406,9 @@ func (d *FlowServiceV2Deps) Validate() error { if d.Resolver == nil { return fmt.Errorf("resolver is required") } + if d.GraphQLResolver == nil { + return fmt.Errorf("graphql resolver is required") + } if d.Logger == nil { return fmt.Errorf("logger is required") } @@ -412,6 +437,9 @@ type FlowServiceV2RPC struct { nais *sflow.NodeAIService naps *sflow.NodeAiProviderService nmems *sflow.NodeMemoryService + ngqs *sflow.NodeGraphQLService + gqls *sgraphql.GraphQLService 
+ gqlhs *sgraphql.GraphQLHeaderService nes *sflow.NodeExecutionService fvs *sflow.FlowVariableService envs *senv.EnvironmentService @@ -423,6 +451,7 @@ type FlowServiceV2RPC struct { // V2 import services workspaceImportService WorkspaceImporter httpResponseService shttp.HttpResponseService + graphqlResponseService sgraphql.GraphQLResponseService flowStream eventstream.SyncStreamer[FlowTopic, FlowEvent] nodeStream eventstream.SyncStreamer[NodeTopic, NodeEvent] edgeStream eventstream.SyncStreamer[EdgeTopic, EdgeEvent] @@ -435,12 +464,17 @@ type FlowServiceV2RPC struct { aiStream eventstream.SyncStreamer[AiTopic, AiEvent] aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + nodeGraphQLStream eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] + graphqlStream eventstream.SyncStreamer[rgraphql.GraphQLTopic, rgraphql.GraphQLEvent] executionStream eventstream.SyncStreamer[ExecutionTopic, ExecutionEvent] - httpStream eventstream.SyncStreamer[rhttp.HttpTopic, rhttp.HttpEvent] - httpResponseStream eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] - httpResponseHeaderStream eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] - httpResponseAssertStream eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] - logStream eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] + httpStream eventstream.SyncStreamer[rhttp.HttpTopic, rhttp.HttpEvent] + httpResponseStream eventstream.SyncStreamer[rhttp.HttpResponseTopic, rhttp.HttpResponseEvent] + httpResponseHeaderStream eventstream.SyncStreamer[rhttp.HttpResponseHeaderTopic, rhttp.HttpResponseHeaderEvent] + httpResponseAssertStream eventstream.SyncStreamer[rhttp.HttpResponseAssertTopic, rhttp.HttpResponseAssertEvent] + graphqlResponseStream eventstream.SyncStreamer[rgraphql.GraphQLResponseTopic, rgraphql.GraphQLResponseEvent] + 
graphqlResponseHeaderStream eventstream.SyncStreamer[rgraphql.GraphQLResponseHeaderTopic, rgraphql.GraphQLResponseHeaderEvent] + graphqlResponseAssertStream eventstream.SyncStreamer[rgraphql.GraphQLResponseAssertTopic, rgraphql.GraphQLResponseAssertEvent] + logStream eventstream.SyncStreamer[rlog.LogTopic, rlog.LogEvent] fileService *sfile.FileService fileStream eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] @@ -466,9 +500,10 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { builder := flowbuilder.New( deps.Services.Node, deps.Services.NodeRequest, deps.Services.NodeFor, deps.Services.NodeForEach, deps.Services.NodeIf, deps.Services.NodeJs, deps.Services.NodeAI, - deps.Services.NodeAiProvider, deps.Services.NodeMemory, + deps.Services.NodeAiProvider, deps.Services.NodeMemory, deps.Services.NodeGraphQL, + deps.Services.GraphQL, deps.Services.GraphQLHeader, deps.Services.Workspace, deps.Services.Var, deps.Services.FlowVariable, - deps.Resolver, deps.Logger, llmFactory, + deps.Resolver, deps.GraphQLResolver, deps.Logger, llmFactory, ) return &FlowServiceV2RPC{ @@ -491,6 +526,9 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { nais: deps.Services.NodeAI, naps: deps.Services.NodeAiProvider, nmems: deps.Services.NodeMemory, + ngqs: deps.Services.NodeGraphQL, + gqls: deps.Services.GraphQL, + gqlhs: deps.Services.GraphQLHeader, nes: deps.Services.NodeExecution, fvs: deps.Services.FlowVariable, envs: deps.Services.Env, @@ -501,6 +539,7 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { logger: deps.Logger, workspaceImportService: deps.Services.Importer, httpResponseService: deps.Services.HttpResponse, + graphqlResponseService: deps.Services.GraphQLResponse, flowStream: deps.Streamers.Flow, nodeStream: deps.Streamers.Node, edgeStream: deps.Streamers.Edge, @@ -513,12 +552,17 @@ func New(deps FlowServiceV2Deps) *FlowServiceV2RPC { aiStream: deps.Streamers.Ai, aiProviderStream: deps.Streamers.AiProvider, memoryStream: deps.Streamers.Memory, + 
nodeGraphQLStream: deps.Streamers.NodeGraphQL, + graphqlStream: deps.Streamers.GraphQL, executionStream: deps.Streamers.Execution, - httpStream: deps.Streamers.Http, - httpResponseStream: deps.Streamers.HttpResponse, - httpResponseHeaderStream: deps.Streamers.HttpResponseHeader, - httpResponseAssertStream: deps.Streamers.HttpResponseAssert, - logStream: deps.Streamers.Log, + httpStream: deps.Streamers.Http, + httpResponseStream: deps.Streamers.HttpResponse, + httpResponseHeaderStream: deps.Streamers.HttpResponseHeader, + httpResponseAssertStream: deps.Streamers.HttpResponseAssert, + graphqlResponseStream: deps.Streamers.GraphQLResponse, + graphqlResponseHeaderStream: deps.Streamers.GraphQLResponseHeader, + graphqlResponseAssertStream: deps.Streamers.GraphQLResponseAssert, + logStream: deps.Streamers.Log, fileService: deps.Services.File, fileStream: deps.Streamers.File, jsClient: deps.JsClient, @@ -549,7 +593,8 @@ func (s *FlowServiceV2RPC) mutationPublisher() mutation.Publisher { jsStream: s.jsStream, aiStream: s.aiStream, aiProviderStream: s.aiProviderStream, - memoryStream: s.memoryStream, + memoryStream: s.memoryStream, + nodeGraphQLStream: s.nodeGraphQLStream, } } @@ -564,8 +609,9 @@ type rflowPublisher struct { forEachStream eventstream.SyncStreamer[ForEachTopic, ForEachEvent] jsStream eventstream.SyncStreamer[JsTopic, JsEvent] aiStream eventstream.SyncStreamer[AiTopic, AiEvent] - aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] - memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + aiProviderStream eventstream.SyncStreamer[AiProviderTopic, AiProviderEvent] + memoryStream eventstream.SyncStreamer[MemoryTopic, MemoryEvent] + nodeGraphQLStream eventstream.SyncStreamer[NodeGraphQLTopic, NodeGraphQLEvent] } func (p *rflowPublisher) PublishAll(events []mutation.Event) { @@ -592,6 +638,8 @@ func (p *rflowPublisher) PublishAll(events []mutation.Event) { p.publishNodeAiProvider(evt) case mutation.EntityFlowNodeMemory: 
p.publishNodeMemory(evt) + case mutation.EntityFlowNodeGraphQL: + p.publishNodeGraphQL(evt) case mutation.EntityFlowEdge: p.publishEdge(evt) case mutation.EntityFlowVariable: @@ -1024,3 +1072,34 @@ func (p *rflowPublisher) publishNodeMemory(evt mutation.Event) { }) } } + +func (p *rflowPublisher) publishNodeGraphQL(evt mutation.Event) { + if p.nodeStream == nil { + return + } + + var node *flowv1.Node + var flowID idwrap.IDWrap + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if data, ok := evt.Payload.(nodeGraphQLWithFlow); ok && data.baseNode != nil { + node = serializeNode(*data.baseNode) + flowID = data.flowID + } + case mutation.OpDelete: + node = &flowv1.Node{ + NodeId: evt.ID.Bytes(), + FlowId: evt.ParentID.Bytes(), + } + flowID = evt.ParentID + } + + if node != nil { + p.nodeStream.Publish(NodeTopic{FlowID: flowID}, NodeEvent{ + Type: nodeEventUpdate, + FlowID: flowID, + Node: node, + }) + } +} diff --git a/packages/server/internal/api/rflowv2/rflowv2_common.go b/packages/server/internal/api/rflowv2/rflowv2_common.go index 4a25ab466..e28075f8d 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_common.go +++ b/packages/server/internal/api/rflowv2/rflowv2_common.go @@ -125,6 +125,16 @@ func serializeNodeAI(n mflow.NodeAI) *flowv1.NodeAi { } } +func serializeNodeGraphQL(n mflow.NodeGraphQL) *flowv1.NodeGraphQL { + msg := &flowv1.NodeGraphQL{ + NodeId: n.FlowNodeID.Bytes(), + } + if n.GraphQLID != nil && !isZeroID(*n.GraphQLID) { + msg.GraphqlId = n.GraphQLID.Bytes() + } + return msg +} + func serializeNodeExecution(execution mflow.NodeExecution) *flowv1.NodeExecution { result := &flowv1.NodeExecution{ NodeExecutionId: execution.ID.Bytes(), @@ -183,6 +193,11 @@ func serializeNodeExecution(execution mflow.NodeExecution) *flowv1.NodeExecution result.HttpResponseId = execution.ResponseID.Bytes() } + // Handle GraphQL response ID + if execution.GraphQLResponseID != nil { + result.GraphqlResponseId = execution.GraphQLResponseID.Bytes() + } + 
// Handle completion timestamp if execution.CompletedAt != nil { result.CompletedAt = timestamppb.New(time.Unix(*execution.CompletedAt, 0)) diff --git a/packages/server/internal/api/rflowv2/rflowv2_copy_paste.go b/packages/server/internal/api/rflowv2/rflowv2_copy_paste.go index 8adf30693..e822e1372 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_copy_paste.go +++ b/packages/server/internal/api/rflowv2/rflowv2_copy_paste.go @@ -11,12 +11,14 @@ import ( "gopkg.in/yaml.v3" devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/converter" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" @@ -148,6 +150,24 @@ func (s *FlowServiceV2RPC) FlowNodesCopy( bundle.FlowAIMemoryNodes = append(bundle.FlowAIMemoryNodes, *d) } } + case mflow.NODE_KIND_GRAPHQL: + if s.ngqs != nil { + if d, err := s.ngqs.GetNodeGraphQL(ctx, n.ID); err == nil { + bundle.FlowGraphQLNodes = append(bundle.FlowGraphQLNodes, *d) + if d.GraphQLID != nil { + if g, err := s.gqls.Get(ctx, *d.GraphQLID); err == nil { + bundle.GraphQLRequests = append(bundle.GraphQLRequests, *g) + s.populateGraphQLBundle(ctx, g.ID, bundle) + } + if d.DeltaGraphQLID != nil { + if dg, err := s.gqls.Get(ctx, *d.DeltaGraphQLID); err == nil { + bundle.GraphQLRequests = append(bundle.GraphQLRequests, *dg) + 
s.populateGraphQLBundle(ctx, dg.ID, bundle) + } + } + } + } + } } } @@ -196,6 +216,13 @@ func (s *FlowServiceV2RPC) populateHTTPBundle(ctx context.Context, httpID idwrap } } +// populateGraphQLBundle fetches headers for a GraphQL request and adds them to the bundle. +func (s *FlowServiceV2RPC) populateGraphQLBundle(ctx context.Context, graphqlID idwrap.IDWrap, bundle *ioworkspace.WorkspaceBundle) { + if headers, err := s.gqlhs.GetByGraphQLID(ctx, graphqlID); err == nil { + bundle.GraphQLHeaders = append(bundle.GraphQLHeaders, headers...) + } +} + // FlowNodesPaste parses YAML from clipboard and creates nodes in the target flow. func (s *FlowServiceV2RPC) FlowNodesPaste( ctx context.Context, @@ -249,9 +276,10 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( existingNames[n.Name] = true } - // For USE_EXISTING reference mode, look up existing HTTP requests by name + // For USE_EXISTING reference mode, look up existing requests by name referenceMode := req.Msg.GetReferenceMode() existingHTTPByName := make(map[string]*idwrap.IDWrap) + existingGQLByName := make(map[string]*idwrap.IDWrap) if referenceMode == flowv1.ReferenceMode_REFERENCE_MODE_USE_EXISTING { existingHTTPs, err := s.hs.GetByWorkspaceID(ctx, targetFlow.WorkspaceID) if err == nil { @@ -260,6 +288,15 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( existingHTTPByName[h.Name] = &id } } + if s.gqls != nil { + existingGQLs, err := s.gqls.GetByWorkspaceID(ctx, targetFlow.WorkspaceID) + if err == nil { + for _, g := range existingGQLs { + id := g.ID + existingGQLByName[g.Name] = &id + } + } + } } // Apply offset and deduplicate names @@ -348,6 +385,11 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( parsed.FlowAIMemoryNodes[i].FlowNodeID = newID } } + for i := range parsed.FlowGraphQLNodes { + if newID, ok := nodeIDMapping[parsed.FlowGraphQLNodes[i].FlowNodeID]; ok { + parsed.FlowGraphQLNodes[i].FlowNodeID = newID + } + } // Remap variable references in expression fields when node names changed if len(nameMapping) > 0 { 
@@ -392,6 +434,12 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( for i := range parsed.HTTPAsserts { parsed.HTTPAsserts[i].Value = remapVarRefs(parsed.HTTPAsserts[i].Value, nameMapping) } + for i := range parsed.GraphQLRequests { + parsed.GraphQLRequests[i].Url = remapVarRefs(parsed.GraphQLRequests[i].Url, nameMapping) + } + for i := range parsed.GraphQLHeaders { + parsed.GraphQLHeaders[i].Value = remapVarRefs(parsed.GraphQLHeaders[i].Value, nameMapping) + } } // Remap edges @@ -520,6 +568,55 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( } } + // Handle GraphQL requests — resolve references based on referenceMode + gqlIDMapping := make(map[idwrap.IDWrap]idwrap.IDWrap) // parsed GQL ID -> actual GQL ID + gqlIDsToCreate := make(map[idwrap.IDWrap]bool) // new GQL IDs that need creation + for i := range parsed.GraphQLRequests { + gqlReq := &parsed.GraphQLRequests[i] + oldID := gqlReq.ID + if referenceMode == flowv1.ReferenceMode_REFERENCE_MODE_USE_EXISTING { + if existingID, ok := existingGQLByName[gqlReq.Name]; ok { + gqlIDMapping[oldID] = *existingID + continue + } + } + // CREATE_COPY or not found: create new GraphQL request + newGQLID := idwrap.NewNow() + gqlIDMapping[oldID] = newGQLID + gqlReq.ID = newGQLID + gqlReq.WorkspaceID = targetFlow.WorkspaceID + gqlReq.IsDelta = false + gqlReq.ParentGraphQLID = nil + gqlIDsToCreate[newGQLID] = true + } + + // Update GraphQL node references + for i := range parsed.FlowGraphQLNodes { + gn := &parsed.FlowGraphQLNodes[i] + if gn.GraphQLID != nil { + if newID, ok := gqlIDMapping[*gn.GraphQLID]; ok { + gn.GraphQLID = &newID + } + } + // Clear delta reference — paste always uses resolved (base) requests + gn.DeltaGraphQLID = nil + } + + // Remap GraphQL children's GraphQLID fields and filter to only those needing creation + var gqlHeadersToCreate []mgraphql.GraphQLHeader + for i := range parsed.GraphQLHeaders { + h := &parsed.GraphQLHeaders[i] + if newID, ok := gqlIDMapping[h.GraphQLID]; ok { + h.GraphQLID = newID + h.ID = 
idwrap.NewNow() + h.IsDelta = false + h.ParentGraphQLHeaderID = nil + if gqlIDsToCreate[newID] { + gqlHeadersToCreate = append(gqlHeadersToCreate, *h) + } + } + } + // Begin transaction for creating all entities tx, err := s.DB.BeginTx(ctx, nil) if err != nil { @@ -583,6 +680,26 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( } } + // Create GraphQL requests that need creation + if s.gqls != nil && len(gqlIDsToCreate) > 0 { + gqlWriter := s.gqls.TX(tx) + for i := range parsed.GraphQLRequests { + if gqlIDsToCreate[parsed.GraphQLRequests[i].ID] { + if err := gqlWriter.Create(ctx, &parsed.GraphQLRequests[i]); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create GraphQL request: %w", err)) + } + } + } + } + if s.gqlhs != nil && len(gqlHeadersToCreate) > 0 { + gqlHeaderWriter := s.gqlhs.TX(tx) + for i := range gqlHeadersToCreate { + if err := gqlHeaderWriter.Create(ctx, &gqlHeadersToCreate[i]); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create GraphQL header: %w", err)) + } + } + } + // Create nodes var createdNodeIDs [][]byte for _, n := range parsed.FlowNodes { @@ -642,6 +759,14 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( } } } + if s.ngqs != nil { + for _, gn := range parsed.FlowGraphQLNodes { + ngqsWriter := sflow.NewNodeGraphQLWriter(tx) + if err := ngqsWriter.CreateNodeGraphQL(ctx, gn); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create graphql node: %w", err)) + } + } + } // Create edges for _, e := range validEdges { @@ -680,6 +805,16 @@ func (s *FlowServiceV2RPC) FlowNodesPaste( } } + // Publish GraphQL events for newly created requests + for i := range parsed.GraphQLRequests { + if gqlIDsToCreate[parsed.GraphQLRequests[i].ID] { + s.graphqlStream.Publish(rgraphql.GraphQLTopic{WorkspaceID: targetFlow.WorkspaceID}, rgraphql.GraphQLEvent{ + Type: eventTypeInsert, + GraphQL: rgraphql.ToAPIGraphQL(parsed.GraphQLRequests[i]), + }) + } + } 
+ return connect.NewResponse(&flowv1.FlowNodesPasteResponse{ NodeIds: createdNodeIDs, }), nil diff --git a/packages/server/internal/api/rflowv2/rflowv2_exec.go b/packages/server/internal/api/rflowv2/rflowv2_exec.go index 415e6df0e..0d9cd119c 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_exec.go +++ b/packages/server/internal/api/rflowv2/rflowv2_exec.go @@ -16,6 +16,7 @@ import ( devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rlog" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner" @@ -23,6 +24,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcondition" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" logv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/log/v1" @@ -235,6 +237,86 @@ func (s *FlowServiceV2RPC) executeFlow( respDrain.Wait() }() + gqlRespChan := make(chan ngraphql.NodeGraphQLSideResp, len(nodes)*2+1) + gqlResponsePublished := make(map[string]chan struct{}) + var gqlResponsePublishedMu sync.Mutex + var gqlRespDrain sync.WaitGroup + gqlRespDrain.Add(1) + go func() { + defer gqlRespDrain.Done() + for resp := range gqlRespChan { + responseID := resp.Response.ID.String() + + gqlResponsePublishedMu.Lock() + publishedChan := make(chan struct{}) + gqlResponsePublished[responseID] = publishedChan + gqlResponsePublishedMu.Unlock() + + // Save all entities first, THEN 
publish events in batch + // This ensures atomicity and ordering - the client can query for + // child entities (headers/assertions) immediately after receiving + // the response event, preventing race conditions in real-time updates + + // Save GraphQL Response + responseSuccess := false + if err := s.graphqlResponseService.Create(ctx, resp.Response); err != nil { + s.logger.Error("failed to save graphql response", "error", err) + } else { + responseSuccess = true + } + + // Save Response Headers + var successHeaders []mgraphql.GraphQLResponseHeader + for _, h := range resp.RespHeaders { + if err := s.graphqlResponseService.CreateHeader(ctx, h); err != nil { + s.logger.Error("failed to save graphql response header", "error", err) + } else { + successHeaders = append(successHeaders, h) + } + } + + // Save Asserts + var successAsserts []mgraphql.GraphQLResponseAssert + for _, a := range resp.RespAsserts { + if err := s.graphqlResponseService.CreateAssert(ctx, a); err != nil { + s.logger.Error("failed to save graphql response assert", "error", err) + } else { + successAsserts = append(successAsserts, a) + } + } + + // Publish all events atomically AFTER all saves complete + // This guarantees the client receives events in the correct order: + // 1. Response (parent) + // 2. Headers (children) + // 3. 
Assertions (children) + if responseSuccess { + // Publish response first + s.publishGraphQLResponseEvent("insert", resp.Response, flow.WorkspaceID) + + // Then headers + for _, h := range successHeaders { + s.publishGraphQLResponseHeaderEvent("insert", h, flow.WorkspaceID) + } + + // Then assertions + for _, a := range successAsserts { + s.publishGraphQLResponseAssertEvent("insert", a, flow.WorkspaceID) + } + } + + close(publishedChan) + + if resp.Done != nil { + close(resp.Done) + } + } + }() + defer func() { + close(gqlRespChan) + gqlRespDrain.Wait() + }() + sharedHTTPClient := httpclient.New() // Filter out orphaned edges (source or target node missing) to prevent @@ -271,6 +353,7 @@ func (s *FlowServiceV2RPC) executeFlow( timeoutDuration, sharedHTTPClient, requestRespChan, + gqlRespChan, s.jsClient, ) if err != nil { @@ -402,11 +485,20 @@ func (s *FlowServiceV2RPC) executeFlow( } model := mflow.NodeExecution{ - ID: execID, - NodeID: status.NodeID, - Name: executionName, - State: status.State, - ResponseID: status.AuxiliaryID, + ID: execID, + NodeID: status.NodeID, + Name: executionName, + State: status.State, + } + + // Set the appropriate response ID based on node kind + nodeKindForAux := nodeKindMap[status.NodeID] + if status.AuxiliaryID != nil { + if nodeKindForAux == mflow.NODE_KIND_GRAPHQL { + model.GraphQLResponseID = status.AuxiliaryID + } else { + model.ResponseID = status.AuxiliaryID + } } if status.Error != nil { @@ -448,24 +540,37 @@ func (s *FlowServiceV2RPC) executeFlow( } // If this execution has a ResponseID, wait for the response to be published first - // This ensures frontend receives HttpResponse before NodeExecution + // This ensures frontend receives HttpResponse/GraphQLResponse before NodeExecution if status.AuxiliaryID != nil { respIDStr := status.AuxiliaryID.String() + + // Check HTTP response published map responsePublishedMu.Lock() publishedChan, ok := responsePublished[respIDStr] responsePublishedMu.Unlock() if ok { select { case 
<-publishedChan: - // Response published, safe to continue case <-ctx.Done(): - // Context cancelled, continue anyway } - // Clean up map entry to prevent memory leak responsePublishedMu.Lock() delete(responsePublished, respIDStr) responsePublishedMu.Unlock() } + + // Check GraphQL response published map + gqlResponsePublishedMu.Lock() + gqlPublishedChan, gqlOK := gqlResponsePublished[respIDStr] + gqlResponsePublishedMu.Unlock() + if gqlOK { + select { + case <-gqlPublishedChan: + case <-ctx.Done(): + } + gqlResponsePublishedMu.Lock() + delete(gqlResponsePublished, respIDStr) + gqlResponsePublishedMu.Unlock() + } } // Publish execution event @@ -701,6 +806,7 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( aiData *mflow.NodeAI aiProviderData *mflow.NodeAiProvider memoryData *mflow.NodeMemory + graphqlData *mflow.NodeGraphQL } nodeConfigs := make([]nodeConfig, 0, len(sourceNodes)) @@ -770,6 +876,14 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( } else if memoryData != nil { config.memoryData = memoryData } + + case mflow.NODE_KIND_GRAPHQL: + graphqlData, err := s.ngqs.GetNodeGraphQL(ctx, sourceNode.ID) + if err != nil { + s.logger.Warn("failed to get graphql node config, using defaults", "node_id", sourceNode.ID.String(), "error", err) + } else if graphqlData != nil { + config.graphqlData = graphqlData + } } nodeConfigs = append(nodeConfigs, config) @@ -805,6 +919,11 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( txService := s.nmems.TX(tx) nmemsWriter = &txService } + var ngqsWriter *sflow.NodeGraphQLService + if s.ngqs != nil { + txService := s.ngqs.TX(tx) + ngqsWriter = &txService + } edgeWriter := s.es.TX(tx) varWriter := s.fvs.TX(tx) @@ -1011,6 +1130,19 @@ func (s *FlowServiceV2RPC) createFlowVersionSnapshot( } // Memory node events are handled through nodeStream subscription } + + case mflow.NODE_KIND_GRAPHQL: + if ngqsWriter == nil { + s.logger.Warn("NodeGraphQL service not available, skipping GraphQL node config", "node_id", 
sourceNode.ID.String()) + } else if config.graphqlData != nil { + newGraphQLData := mflow.NodeGraphQL{ + FlowNodeID: newNodeID, + GraphQLID: config.graphqlData.GraphQLID, + } + if err := ngqsWriter.CreateNodeGraphQL(ctx, newGraphQLData); err != nil { + return mflow.Flow{}, nil, fmt.Errorf("create graphql node: %w", err) + } + } } // Collect base node event diff --git a/packages/server/internal/api/rflowv2/rflowv2_exec_test.go b/packages/server/internal/api/rflowv2/rflowv2_exec_test.go index a46c27620..9406ae293 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_exec_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_exec_test.go @@ -75,10 +75,14 @@ func setupTestService(t *testing.T) (*FlowServiceV2RPC, *gen.Queries, context.Co nil, // NodeAIService &aiProviderService, &memoryService, + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver logger, nil, // LLMProviderFactory ) diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go b/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go index b3ea048c6..9af57a738 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_condition_test.go @@ -71,10 +71,14 @@ func TestNodeCondition_CRUD(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver logger, nil, // LLMProviderFactory ) diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_exec.go b/packages/server/internal/api/rflowv2/rflowv2_node_exec.go index 87aeca9f7..834659791 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_exec.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_exec.go @@ -10,11 +10,13 @@ import ( 
"connectrpc.com/connect" emptypb "google.golang.org/protobuf/types/known/emptypb" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rgraphql" "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rhttp" "github.com/the-dev-tools/dev-tools/packages/server/internal/converter" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -192,6 +194,39 @@ func (s *FlowServiceV2RPC) publishHttpResponseAssertEvent(eventType string, asse }) } +func (s *FlowServiceV2RPC) publishGraphQLResponseEvent(eventType string, response mgraphql.GraphQLResponse, workspaceID idwrap.IDWrap) { + if s.graphqlResponseStream == nil { + return + } + responsePB := rgraphql.ToAPIGraphQLResponse(response) + s.graphqlResponseStream.Publish(rgraphql.GraphQLResponseTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseEvent{ + Type: eventType, + GraphQLResponse: responsePB, + }) +} + +func (s *FlowServiceV2RPC) publishGraphQLResponseHeaderEvent(eventType string, header mgraphql.GraphQLResponseHeader, workspaceID idwrap.IDWrap) { + if s.graphqlResponseHeaderStream == nil { + return + } + headerPB := rgraphql.ToAPIGraphQLResponseHeader(header) + s.graphqlResponseHeaderStream.Publish(rgraphql.GraphQLResponseHeaderTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseHeaderEvent{ + Type: eventType, + GraphQLResponseHeader: headerPB, + }) +} + +func (s *FlowServiceV2RPC) publishGraphQLResponseAssertEvent(eventType string, assert mgraphql.GraphQLResponseAssert, workspaceID idwrap.IDWrap) { + if s.graphqlResponseAssertStream == nil { + return + } + assertPB := rgraphql.ToAPIGraphQLResponseAssert(assert) + 
s.graphqlResponseAssertStream.Publish(rgraphql.GraphQLResponseAssertTopic{WorkspaceID: workspaceID}, rgraphql.GraphQLResponseAssertEvent{ + Type: eventType, + GraphQLResponseAssert: assertPB, + }) +} + func (s *FlowServiceV2RPC) executionEventToSyncResponse( ctx context.Context, evt ExecutionEvent, @@ -224,6 +259,9 @@ func (s *FlowServiceV2RPC) executionEventToSyncResponse( if evt.Execution.HttpResponseId != nil { upsert.HttpResponseId = evt.Execution.HttpResponseId } + if evt.Execution.GraphqlResponseId != nil { + upsert.GraphqlResponseId = evt.Execution.GraphqlResponseId + } if evt.Execution.CompletedAt != nil { upsert.CompletedAt = evt.Execution.CompletedAt } diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go b/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go index b1b64869c..d75541308 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_node_exec_test.go @@ -71,10 +71,14 @@ func TestNodeExecution_Collection(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver logger, nil, // LLMProviderFactory ) @@ -218,10 +222,14 @@ func TestNodeExecution_Collection_VersionFlow(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver logger, nil, // LLMProviderFactory ) diff --git a/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go b/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go new file mode 100644 index 000000000..835a47e48 --- /dev/null +++ b/packages/server/internal/api/rflowv2/rflowv2_node_graphql.go @@ -0,0 +1,446 @@ +//nolint:revive // exported 
+package rflowv2 + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + + "connectrpc.com/connect" + emptypb "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" +) + +// NodeGraphQLTopic identifies the flow whose GraphQL nodes are being published. +type NodeGraphQLTopic struct { + FlowID idwrap.IDWrap +} + +// NodeGraphQLEvent describes a GraphQL node change for sync streaming. +type NodeGraphQLEvent struct { + Type string + FlowID idwrap.IDWrap + Node *flowv1.NodeGraphQL +} + +func (s *FlowServiceV2RPC) NodeGraphQLCollection( + ctx context.Context, + _ *connect.Request[emptypb.Empty], +) (*connect.Response[flowv1.NodeGraphQLCollectionResponse], error) { + flows, err := s.listAccessibleFlows(ctx) + if err != nil { + return nil, err + } + + var items []*flowv1.NodeGraphQL + for _, flow := range flows { + nodes, err := s.nsReader.GetNodesByFlowID(ctx, flow.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, node := range nodes { + if node.NodeKind != mflow.NODE_KIND_GRAPHQL { + continue + } + nodeGQL, err := s.ngqs.GetNodeGraphQL(ctx, node.ID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + continue + } + return nil, connect.NewError(connect.CodeInternal, err) + } + items = append(items, serializeNodeGraphQL(*nodeGQL)) + } + } + + return connect.NewResponse(&flowv1.NodeGraphQLCollectionResponse{Items: items}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLInsert( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLInsertRequest], +) (*connect.Response[emptypb.Empty], error) { + type insertData struct { + nodeID idwrap.IDWrap + graphqlID *idwrap.IDWrap + baseNode 
*mflow.Node + flowID idwrap.IDWrap + workspaceID idwrap.IDWrap + } + var validatedItems []insertData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + var graphqlID *idwrap.IDWrap + if len(item.GetGraphqlId()) > 0 { + parsedID, err := idwrap.NewFromBytes(item.GetGraphqlId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid graphql id: %w", err)) + } + if !isZeroID(parsedID) { + graphqlID = &parsedID + } + } + + // CRITICAL FIX: Get base node BEFORE transaction to avoid SQLite deadlock + // Allow nil baseNode to support out-of-order message arrival + baseNode, _ := s.ns.GetNode(ctx, nodeID) + + var flowID idwrap.IDWrap + var workspaceID idwrap.IDWrap + if baseNode != nil { + flowID = baseNode.FlowID + flow, err := s.fsReader.GetFlow(ctx, flowID) + if err == nil { + workspaceID = flow.WorkspaceID + } + } + + validatedItems = append(validatedItems, insertData{ + nodeID: nodeID, + graphqlID: graphqlID, + baseNode: baseNode, + flowID: flowID, + workspaceID: workspaceID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + ngqsWriter := s.ngqs.TX(mut.TX()) + + for _, data := range validatedItems { + nodeGraphQL := mflow.NodeGraphQL{ + FlowNodeID: data.nodeID, + GraphQLID: data.graphqlID, + } + + if err := ngqsWriter.CreateNodeGraphQL(ctx, nodeGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Only track for event publishing if base node exists + if data.baseNode != nil { + mut.Track(mutation.Event{ + Entity: 
mutation.EntityFlowNodeGraphQL, + Op: mutation.OpInsert, + ID: data.nodeID, + WorkspaceID: data.workspaceID, + ParentID: data.flowID, + Payload: nodeGraphQLWithFlow{ + nodeGraphQL: nodeGraphQL, + flowID: data.flowID, + baseNode: data.baseNode, + }, + }) + } + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLUpdate( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLUpdateRequest], +) (*connect.Response[emptypb.Empty], error) { + type updateData struct { + nodeID idwrap.IDWrap + graphqlID *idwrap.IDWrap + baseNode *mflow.Node + workspaceID idwrap.IDWrap + } + var validatedItems []updateData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + nodeModel, err := s.ensureNodeAccess(ctx, nodeID) + if err != nil { + return nil, err + } + + // Get workspace ID for the flow + flow, err := s.fsReader.GetFlow(ctx, nodeModel.FlowID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var graphqlID *idwrap.IDWrap + if graphqlBytes := item.GetGraphqlId(); len(graphqlBytes) > 0 { + parsedID, err := idwrap.NewFromBytes(graphqlBytes) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid graphql id: %w", err)) + } + if !isZeroID(parsedID) { + graphqlID = &parsedID + } + } + + validatedItems = append(validatedItems, updateData{ + nodeID: nodeID, + graphqlID: graphqlID, + baseNode: nodeModel, + workspaceID: flow.WorkspaceID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, 
mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + ngqsWriter := s.ngqs.TX(mut.TX()) + + for _, data := range validatedItems { + nodeGraphQL := mflow.NodeGraphQL{ + FlowNodeID: data.nodeID, + GraphQLID: data.graphqlID, + } + + if err := ngqsWriter.UpdateNodeGraphQL(ctx, nodeGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + mut.Track(mutation.Event{ + Entity: mutation.EntityFlowNodeGraphQL, + Op: mutation.OpUpdate, + ID: data.nodeID, + WorkspaceID: data.workspaceID, + ParentID: data.baseNode.FlowID, + Payload: nodeGraphQLWithFlow{ + nodeGraphQL: nodeGraphQL, + flowID: data.baseNode.FlowID, + baseNode: data.baseNode, + }, + }) + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLDelete( + ctx context.Context, + req *connect.Request[flowv1.NodeGraphQLDeleteRequest], +) (*connect.Response[emptypb.Empty], error) { + type deleteData struct { + nodeID idwrap.IDWrap + flowID idwrap.IDWrap + } + var validatedItems []deleteData + + for _, item := range req.Msg.GetItems() { + nodeID, err := idwrap.NewFromBytes(item.GetNodeId()) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("invalid node id: %w", err)) + } + + nodeModel, err := s.ensureNodeAccess(ctx, nodeID) + if err != nil { + return nil, err + } + + validatedItems = append(validatedItems, deleteData{ + nodeID: nodeID, + flowID: nodeModel.FlowID, + }) + } + + if len(validatedItems) == 0 { + return connect.NewResponse(&emptypb.Empty{}), nil + } + + // Begin transaction with mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, 
connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range validatedItems { + mut.Track(mutation.Event{ + Entity: mutation.EntityFlowNodeGraphQL, + Op: mutation.OpDelete, + ID: data.nodeID, + ParentID: data.flowID, + }) + if err := mut.Queries().DeleteFlowNodeGraphQL(ctx, data.nodeID); err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + // Commit transaction (auto-publishes events) + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *FlowServiceV2RPC) NodeGraphQLSync( + ctx context.Context, + _ *connect.Request[emptypb.Empty], + stream *connect.ServerStream[flowv1.NodeGraphQLSyncResponse], +) error { + if stream == nil { + return connect.NewError(connect.CodeInternal, errors.New("stream is required")) + } + return s.streamNodeGraphQLSync(ctx, func(resp *flowv1.NodeGraphQLSyncResponse) error { + return stream.Send(resp) + }) +} + +func (s *FlowServiceV2RPC) streamNodeGraphQLSync( + ctx context.Context, + send func(*flowv1.NodeGraphQLSyncResponse) error, +) error { + if s.nodeStream == nil { + return connect.NewError(connect.CodeUnavailable, errors.New("node stream not configured")) + } + + var flowSet sync.Map + + filter := func(topic NodeTopic) bool { + if _, ok := flowSet.Load(topic.FlowID.String()); ok { + return true + } + if err := s.ensureFlowAccess(ctx, topic.FlowID); err != nil { + return false + } + flowSet.Store(topic.FlowID.String(), struct{}{}) + return true + } + + events, err := s.nodeStream.Subscribe(ctx, filter) + if err != nil { + return connect.NewError(connect.CodeInternal, err) + } + + for { + select { + case evt, ok := <-events: + if !ok { + return nil + } + resp, err := s.nodeGraphQLEventToSyncResponse(ctx, evt.Payload) + if err != nil { + return connect.NewError(connect.CodeInternal, fmt.Errorf("failed to convert GraphQL node 
event: %w", err)) + } + if resp == nil { + continue + } + if err := send(resp); err != nil { + return err + } + case <-ctx.Done(): + return ctx.Err() + } + } +} + +func (s *FlowServiceV2RPC) nodeGraphQLEventToSyncResponse( + ctx context.Context, + evt NodeEvent, +) (*flowv1.NodeGraphQLSyncResponse, error) { + if evt.Node == nil { + return nil, nil + } + + // Only process GraphQL nodes + if evt.Node.GetKind() != flowv1.NodeKind_NODE_KIND_GRAPH_Q_L { + return nil, nil + } + + nodeID, err := idwrap.NewFromBytes(evt.Node.GetNodeId()) + if err != nil { + return nil, fmt.Errorf("invalid node id: %w", err) + } + + // Fetch the GraphQL configuration for this node (may not exist) + nodeGQL, err := s.ngqs.GetNodeGraphQL(ctx, nodeID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, err + } + + var syncEvent *flowv1.NodeGraphQLSync + switch evt.Type { + case nodeEventInsert: + insert := &flowv1.NodeGraphQLSyncInsert{ + NodeId: nodeID.Bytes(), + } + if nodeGQL != nil && nodeGQL.GraphQLID != nil && !isZeroID(*nodeGQL.GraphQLID) { + insert.GraphqlId = nodeGQL.GraphQLID.Bytes() + } + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: flowv1.NodeGraphQLSync_ValueUnion_KIND_INSERT, + Insert: insert, + }, + } + case nodeEventUpdate: + update := &flowv1.NodeGraphQLSyncUpdate{ + NodeId: nodeID.Bytes(), + } + if nodeGQL != nil && nodeGQL.GraphQLID != nil && !isZeroID(*nodeGQL.GraphQLID) { + update.GraphqlId = nodeGQL.GraphQLID.Bytes() + } + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: flowv1.NodeGraphQLSync_ValueUnion_KIND_UPDATE, + Update: update, + }, + } + case nodeEventDelete: + syncEvent = &flowv1.NodeGraphQLSync{ + Value: &flowv1.NodeGraphQLSync_ValueUnion{ + Kind: flowv1.NodeGraphQLSync_ValueUnion_KIND_DELETE, + Delete: &flowv1.NodeGraphQLSyncDelete{ + NodeId: nodeID.Bytes(), + }, + }, + } + default: + return nil, nil + } + + return &flowv1.NodeGraphQLSyncResponse{ + Items: 
[]*flowv1.NodeGraphQLSync{syncEvent}, + }, nil +} diff --git a/packages/server/internal/api/rflowv2/rflowv2_parity_test.go b/packages/server/internal/api/rflowv2/rflowv2_parity_test.go index fd4af6227..44b73cae6 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_parity_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_parity_test.go @@ -22,8 +22,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/testutil" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -59,6 +61,11 @@ func TestFlowInsert_FlowParity(t *testing.T) { envService := senv.NewEnvironmentService(queries, logger) varService := senv.NewVariableService(queries, logger) httpService := shttp.New(queries, logger) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) flowStream = memory.NewInMemorySyncStreamer[FlowTopic, FlowEvent]() nodeStream := memory.NewInMemorySyncStreamer[NodeTopic, NodeEvent]() @@ -87,6 +94,7 @@ func TestFlowInsert_FlowParity(t *testing.T) { NodeAI: &aiService, NodeAiProvider: &aiProviderService, NodeMemory: &memoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: 
&nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -98,8 +106,9 @@ func TestFlowInsert_FlowParity(t *testing.T) { Flow: flowStream, Node: nodeStream, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) userID := idwrap.NewNow() @@ -208,6 +217,11 @@ func TestFlowInsert_StartNodeParity(t *testing.T) { envService := senv.NewEnvironmentService(queries, logger) varService := senv.NewVariableService(queries, logger) httpService := shttp.New(queries, logger) + nodeGraphQLService2 := sflow.NewNodeGraphQLService(queries) + graphqlService2 := sgraphql.New(queries, logger) + graphqlHeaderService2 := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService2 := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver2 := gqlresolver.NewStandardResolver(graphqlService2.Reader(), &graphqlHeaderService2, &graphqlAssertService2) nodeStream = memory.NewInMemorySyncStreamer[NodeTopic, NodeEvent]() flowStream := memory.NewInMemorySyncStreamer[FlowTopic, FlowEvent]() @@ -236,6 +250,7 @@ func TestFlowInsert_StartNodeParity(t *testing.T) { NodeAI: &aiService, NodeAiProvider: &aiProviderService, NodeMemory: &memoryService, + NodeGraphQL: &nodeGraphQLService2, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -247,8 +262,9 @@ func TestFlowInsert_StartNodeParity(t *testing.T) { Flow: flowStream, Node: nodeStream, }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver2, + Logger: logger, }) userID := idwrap.NewNow() diff --git a/packages/server/internal/api/rflowv2/rflowv2_test.go b/packages/server/internal/api/rflowv2/rflowv2_test.go index 6f109061f..e110e38ec 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_test.go @@ -23,8 +23,10 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" ) @@ -182,6 +184,11 @@ func TestSubNodeInsert_WithoutBaseNode(t *testing.T) { // Mock resolver res := resolver.NewStandardResolver(nil, nil, nil, nil, nil, nil, nil) httpService := shttp.New(queries, logger) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) svc := New(FlowServiceV2Deps{ DB: db, @@ -206,6 +213,7 @@ func TestSubNodeInsert_WithoutBaseNode(t *testing.T) { NodeAI: &aiService, NodeAiProvider: &aiProviderService, NodeMemory: &memoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -213,8 +221,9 @@ func TestSubNodeInsert_WithoutBaseNode(t *testing.T) { Http: &httpService, HttpBodyRaw: shttp.NewHttpBodyRawService(queries), }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) userID := idwrap.NewNow() @@ -329,6 +338,11 @@ func TestFlowRun_CreatesVersionOnEveryRun(t *testing.T) { // Mock resolver res := resolver.NewStandardResolver(nil, nil, nil, nil, nil, nil, nil) httpService := shttp.New(queries, logger) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService 
:= sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) svc := New(FlowServiceV2Deps{ DB: db, @@ -353,6 +367,7 @@ func TestFlowRun_CreatesVersionOnEveryRun(t *testing.T) { NodeAI: &aiService, NodeAiProvider: &aiProviderService, NodeMemory: &memoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -360,8 +375,9 @@ func TestFlowRun_CreatesVersionOnEveryRun(t *testing.T) { Http: &httpService, HttpBodyRaw: shttp.NewHttpBodyRawService(queries), }, - Resolver: res, - Logger: logger, + Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // Setup Data @@ -482,6 +498,11 @@ func TestFlowVersionNodes_HaveStateAndExecutions(t *testing.T) { // Mock resolver res := resolver.NewStandardResolver(nil, nil, nil, nil, nil, nil, nil) httpService := shttp.New(queries, logger) + nodeGraphQLService := sflow.NewNodeGraphQLService(queries) + graphqlService := sgraphql.New(queries, logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(queries) + graphqlResolver := gqlresolver.NewStandardResolver(graphqlService.Reader(), &graphqlHeaderService, &graphqlAssertService) svc := New(FlowServiceV2Deps{ DB: db, @@ -506,6 +527,7 @@ func TestFlowVersionNodes_HaveStateAndExecutions(t *testing.T) { NodeAI: &aiService, NodeAiProvider: &aiProviderService, NodeMemory: &memoryService, + NodeGraphQL: &nodeGraphQLService, NodeExecution: &nodeExecService, FlowVariable: &flowVarService, Env: &envService, @@ -513,8 +535,9 @@ func TestFlowVersionNodes_HaveStateAndExecutions(t *testing.T) { Http: &httpService, HttpBodyRaw: shttp.NewHttpBodyRawService(queries), }, - Resolver: res, - Logger: logger, + 
Resolver: res, + GraphQLResolver: graphqlResolver, + Logger: logger, }) // Setup Data diff --git a/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go b/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go index 0aaa7bb58..a644cb5f9 100644 --- a/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go +++ b/packages/server/internal/api/rflowv2/rflowv2_testutil_test.go @@ -91,10 +91,14 @@ func NewRFlowTestContext(t *testing.T) *RFlowTestContext { nil, // NodeAIService - not needed for non-AI tests nil, // NodeAiProviderService - not needed for non-AI tests nil, // NodeMemoryService - not needed for non-AI tests + nil, // NodeGraphQLService - not needed for non-GraphQL tests + nil, // GraphQLService - not needed for non-GraphQL tests + nil, // GraphQLHeaderService - not needed for non-GraphQL tests &wsService, &varService, &flowVarService, res, + nil, // GraphQLResolver - not needed for non-GraphQL tests logger, nil, // LLMProviderFactory - not needed for non-AI tests ) diff --git a/packages/server/internal/api/rgraphql/rgraphql.go b/packages/server/internal/api/rgraphql/rgraphql.go new file mode 100644 index 000000000..4de3ef65c --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql.go @@ -0,0 +1,554 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "errors" + "fmt" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/delta" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/suser" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" + "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1/graph_q_lv1connect" +) + +const ( + eventTypeInsert = "insert" + eventTypeUpdate = "update" + eventTypeDelete = "delete" +) + +// Topic/Event types for each entity + +type GraphQLTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLEvent struct { + Type string + GraphQL *graphqlv1.GraphQL +} + +type GraphQLHeaderTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLHeaderEvent struct { + Type string + GraphQLHeader *graphqlv1.GraphQLHeader +} + +type GraphQLResponseTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseEvent struct { + Type string + GraphQLResponse *graphqlv1.GraphQLResponse +} + +type GraphQLResponseHeaderTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseHeaderEvent struct { + Type string + GraphQLResponseHeader *graphqlv1.GraphQLResponseHeader +} + +type GraphQLResponseAssertTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLResponseAssertEvent struct { + Type string + GraphQLResponseAssert *graphqlv1.GraphQLResponseAssert +} + +type GraphQLVersionTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLVersionEvent struct { + Type string + GraphQLVersion *graphqlv1.GraphQLVersion +} + +type GraphQLAssertTopic struct { + WorkspaceID idwrap.IDWrap +} + +type GraphQLAssertEvent struct { + Type string + 
GraphQLAssert *graphqlv1.GraphQLAssert + IsDelta bool +} + +// GraphQLStreamers groups all event streams +type GraphQLStreamers struct { + GraphQL eventstream.SyncStreamer[GraphQLTopic, GraphQLEvent] + GraphQLHeader eventstream.SyncStreamer[GraphQLHeaderTopic, GraphQLHeaderEvent] + GraphQLAssert eventstream.SyncStreamer[GraphQLAssertTopic, GraphQLAssertEvent] + GraphQLResponse eventstream.SyncStreamer[GraphQLResponseTopic, GraphQLResponseEvent] + GraphQLResponseHeader eventstream.SyncStreamer[GraphQLResponseHeaderTopic, GraphQLResponseHeaderEvent] + GraphQLResponseAssert eventstream.SyncStreamer[GraphQLResponseAssertTopic, GraphQLResponseAssertEvent] + GraphQLVersion eventstream.SyncStreamer[GraphQLVersionTopic, GraphQLVersionEvent] + File eventstream.SyncStreamer[rfile.FileTopic, rfile.FileEvent] +} + +// GraphQLServiceRPC handles GraphQL RPC operations +type GraphQLServiceRPC struct { + DB *sql.DB + + graphqlReader *sgraphql.Reader + graphqlService sgraphql.GraphQLService + headerService sgraphql.GraphQLHeaderService + graphqlAssertService sgraphql.GraphQLAssertService + responseService sgraphql.GraphQLResponseService + resolver GraphQLResolver + + us suser.UserService + ws sworkspace.WorkspaceService + wus sworkspace.UserService + userReader *sworkspace.UserReader + wsReader *sworkspace.WorkspaceReader + + es senv.EnvService + vs senv.VariableService + + fileService *sfile.FileService + streamers *GraphQLStreamers +} + +// GraphQLResolver defines the interface for resolving GraphQL delta requests +type GraphQLResolver interface { + Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) +} + +type GraphQLServiceRPCDeps struct { + DB *sql.DB + Services GraphQLServiceRPCServices + Readers GraphQLServiceRPCReaders + Resolver GraphQLResolver + Streamers *GraphQLStreamers +} + +type GraphQLServiceRPCServices struct { + GraphQL sgraphql.GraphQLService + Header sgraphql.GraphQLHeaderService + GraphQLAssert 
sgraphql.GraphQLAssertService + Response sgraphql.GraphQLResponseService + User suser.UserService + Workspace sworkspace.WorkspaceService + WorkspaceUser sworkspace.UserService + Env senv.EnvService + Variable senv.VariableService + File *sfile.FileService +} + +type GraphQLServiceRPCReaders struct { + GraphQL *sgraphql.Reader + User *sworkspace.UserReader + Workspace *sworkspace.WorkspaceReader +} + +func (d *GraphQLServiceRPCDeps) Validate() error { + if d.DB == nil { + return fmt.Errorf("db is required") + } + if d.Streamers == nil { + return fmt.Errorf("streamers is required") + } + return nil +} + +func New(deps GraphQLServiceRPCDeps) GraphQLServiceRPC { + if err := deps.Validate(); err != nil { + panic(fmt.Sprintf("GraphQLServiceRPC Deps validation failed: %v", err)) + } + + return GraphQLServiceRPC{ + DB: deps.DB, + graphqlReader: deps.Readers.GraphQL, + graphqlService: deps.Services.GraphQL, + headerService: deps.Services.Header, + graphqlAssertService: deps.Services.GraphQLAssert, + responseService: deps.Services.Response, + resolver: deps.Resolver, + us: deps.Services.User, + ws: deps.Services.Workspace, + wus: deps.Services.WorkspaceUser, + userReader: deps.Readers.User, + wsReader: deps.Readers.Workspace, + es: deps.Services.Env, + vs: deps.Services.Variable, + fileService: deps.Services.File, + streamers: deps.Streamers, + } +} + +func CreateService(srv GraphQLServiceRPC, options []connect.HandlerOption) (*api.Service, error) { + path, handler := graph_q_lv1connect.NewGraphQLServiceHandler(&srv, options...) 
+ return &api.Service{Path: path, Handler: handler}, nil +} + +// Access control helpers + +func (s *GraphQLServiceRPC) checkWorkspaceReadAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role < mworkspace.RoleUser { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +func (s *GraphQLServiceRPC) checkWorkspaceWriteAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role < mworkspace.RoleAdmin { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +func (s *GraphQLServiceRPC) checkWorkspaceDeleteAccess(ctx context.Context, workspaceID idwrap.IDWrap) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + wsUser, err := s.userReader.GetWorkspaceUsersByWorkspaceIDAndUserID(ctx, workspaceID, userID) + if err != nil { + if errors.Is(err, sworkspace.ErrWorkspaceUserNotFound) { + return 
connect.NewError(connect.CodeNotFound, errors.New("workspace not found or access denied")) + } + return connect.NewError(connect.CodeInternal, err) + } + + if wsUser.Role != mworkspace.RoleOwner { + return connect.NewError(connect.CodePermissionDenied, errors.New("permission denied")) + } + return nil +} + +// Mutation publisher for auto-publish on commit + +func (s *GraphQLServiceRPC) mutationPublisher() mutation.Publisher { + return &rgraphqlPublisher{streamers: s.streamers} +} + +type rgraphqlPublisher struct { + streamers *GraphQLStreamers +} + +func (p *rgraphqlPublisher) PublishAll(events []mutation.Event) { + for _, evt := range events { + //nolint:exhaustive + switch evt.Entity { + case mutation.EntityGraphQL: + p.publishGraphQL(evt) + case mutation.EntityGraphQLHeader: + p.publishGraphQLHeader(evt) + case mutation.EntityGraphQLAssert: + p.publishGraphQLAssert(evt) + } + } +} + +func (p *rgraphqlPublisher) publishGraphQL(evt mutation.Event) { + if p.streamers.GraphQL == nil { + return + } + var model *graphqlv1.GraphQL + var eventType string + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if g, ok := evt.Payload.(mgraphql.GraphQL); ok { + model = ToAPIGraphQL(g) + } else if gp, ok := evt.Payload.(*mgraphql.GraphQL); ok { + model = ToAPIGraphQL(*gp) + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQL{GraphqlId: evt.ID.Bytes()} + } + + if model != nil { + p.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: evt.WorkspaceID}, GraphQLEvent{ + Type: eventType, + GraphQL: model, + }) + } +} + +func (p *rgraphqlPublisher) publishGraphQLHeader(evt mutation.Event) { + if p.streamers.GraphQLHeader == nil { + return + } + var model *graphqlv1.GraphQLHeader + var eventType string + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = 
eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if h, ok := evt.Payload.(mgraphql.GraphQLHeader); ok { + model = ToAPIGraphQLHeader(h) + } else if hp, ok := evt.Payload.(*mgraphql.GraphQLHeader); ok { + model = ToAPIGraphQLHeader(*hp) + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQLHeader{GraphqlHeaderId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()} + } + + if model != nil { + p.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: evt.WorkspaceID}, GraphQLHeaderEvent{ + Type: eventType, + GraphQLHeader: model, + }) + } +} + +func (p *rgraphqlPublisher) publishGraphQLAssert(evt mutation.Event) { + if p.streamers.GraphQLAssert == nil { + return + } + var model *graphqlv1.GraphQLAssert + var eventType string + isDelta := false + + switch evt.Op { + case mutation.OpInsert, mutation.OpUpdate: + if evt.Op == mutation.OpInsert { + eventType = eventTypeInsert + } else { + eventType = eventTypeUpdate + } + if a, ok := evt.Payload.(mgraphql.GraphQLAssert); ok { + model = ToAPIGraphQLAssert(a) + isDelta = a.IsDelta + } else if ap, ok := evt.Payload.(*mgraphql.GraphQLAssert); ok { + model = ToAPIGraphQLAssert(*ap) + isDelta = ap.IsDelta + } + case mutation.OpDelete: + eventType = eventTypeDelete + model = &graphqlv1.GraphQLAssert{GraphqlAssertId: evt.ID.Bytes(), GraphqlId: evt.ParentID.Bytes()} + } + + if model != nil { + p.streamers.GraphQLAssert.Publish(GraphQLAssertTopic{WorkspaceID: evt.WorkspaceID}, GraphQLAssertEvent{ + Type: eventType, + GraphQLAssert: model, + IsDelta: isDelta, + }) + } +} + +// Sync stream handlers + +func (s *GraphQLServiceRPC) GraphQLSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) 
streamGraphQLSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLEvent) *graphqlv1.GraphQLSyncResponse { + var items []*graphqlv1.GraphQLSync + for _, event := range events { + if resp := graphqlSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQL, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLHeaderSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLHeaderSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLHeaderSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLHeaderSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLHeaderSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLHeaderTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLHeaderEvent) *graphqlv1.GraphQLHeaderSyncResponse { + var items 
[]*graphqlv1.GraphQLHeaderSync + for _, event := range events { + if resp := graphqlHeaderSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLHeaderSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLHeader, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLResponseSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLResponseSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseEvent) *graphqlv1.GraphQLResponseSyncResponse { + var items []*graphqlv1.GraphQLResponseSync + for _, event := range events { + if resp := graphqlResponseSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) 
+ } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLResponse, filter, converter, send, nil) +} + +func (s *GraphQLServiceRPC) GraphQLResponseHeaderSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseHeaderSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLResponseHeaderSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseHeaderSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseHeaderSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseHeaderTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseHeaderEvent) *graphqlv1.GraphQLResponseHeaderSyncResponse { + var items []*graphqlv1.GraphQLResponseHeaderSync + for _, event := range events { + if resp := graphqlResponseHeaderSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) 
+ } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseHeaderSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQLResponseHeader, filter, converter, send, nil) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_converter.go b/packages/server/internal/api/rgraphql/rgraphql_converter.go new file mode 100644 index 000000000..f28287e0f --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_converter.go @@ -0,0 +1,382 @@ +//nolint:revive // exported +package rgraphql + +import ( + "time" + + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// Model -> Proto + +func ToAPIGraphQL(g mgraphql.GraphQL) *graphqlv1.GraphQL { + result := &graphqlv1.GraphQL{ + GraphqlId: g.ID.Bytes(), + Name: g.Name, + Url: g.Url, + Query: g.Query, + Variables: g.Variables, + } + if g.LastRunAt != nil { + result.LastRunAt = timestamppb.New(time.Unix(*g.LastRunAt, 0)) + } + return result +} + +func ToAPIGraphQLHeader(h mgraphql.GraphQLHeader) *graphqlv1.GraphQLHeader { + return &graphqlv1.GraphQLHeader{ + GraphqlHeaderId: h.ID.Bytes(), + GraphqlId: h.GraphQLID.Bytes(), + Key: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + Order: h.DisplayOrder, + } +} + +func ToAPIGraphQLAssert(a mgraphql.GraphQLAssert) *graphqlv1.GraphQLAssert { + return &graphqlv1.GraphQLAssert{ + GraphqlAssertId: a.ID.Bytes(), + GraphqlId: a.GraphQLID.Bytes(), + Value: a.Value, + Enabled: a.Enabled, + Order: a.DisplayOrder, + } +} + +func ToAPIGraphQLResponse(r mgraphql.GraphQLResponse) *graphqlv1.GraphQLResponse { + return &graphqlv1.GraphQLResponse{ + GraphqlResponseId: r.ID.Bytes(), + GraphqlId: r.GraphQLID.Bytes(), + Status: r.Status, + Body: string(r.Body), + Time: timestamppb.New(time.Unix(r.Time, 0)), + Duration: 
r.Duration, + Size: r.Size, + } +} + +func ToAPIGraphQLResponseHeader(h mgraphql.GraphQLResponseHeader) *graphqlv1.GraphQLResponseHeader { + return &graphqlv1.GraphQLResponseHeader{ + GraphqlResponseHeaderId: h.ID.Bytes(), + GraphqlResponseId: h.ResponseID.Bytes(), + Key: h.HeaderKey, + Value: h.HeaderValue, + } +} + +func ToAPIGraphQLResponseAssert(a mgraphql.GraphQLResponseAssert) *graphqlv1.GraphQLResponseAssert { + return &graphqlv1.GraphQLResponseAssert{ + GraphqlResponseAssertId: a.ID.Bytes(), + GraphqlResponseId: a.ResponseID.Bytes(), + Value: a.Value, + Success: a.Success, + } +} + +// Sync response builders + +func graphqlSyncResponseFrom(event GraphQLEvent) *graphqlv1.GraphQLSyncResponse { + var value *graphqlv1.GraphQLSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + name := event.GraphQL.GetName() + url := event.GraphQL.GetUrl() + query := event.GraphQL.GetQuery() + variables := event.GraphQL.GetVariables() + lastRunAt := event.GraphQL.GetLastRunAt() + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLSyncInsert{ + GraphqlId: event.GraphQL.GetGraphqlId(), + Name: name, + Url: url, + Query: query, + Variables: variables, + LastRunAt: lastRunAt, + }, + } + case eventTypeUpdate: + name := event.GraphQL.GetName() + url := event.GraphQL.GetUrl() + query := event.GraphQL.GetQuery() + variables := event.GraphQL.GetVariables() + lastRunAt := event.GraphQL.GetLastRunAt() + + var lastRunAtUnion *graphqlv1.GraphQLSyncUpdate_LastRunAtUnion + if lastRunAt != nil { + lastRunAtUnion = &graphqlv1.GraphQLSyncUpdate_LastRunAtUnion{ + Kind: graphqlv1.GraphQLSyncUpdate_LastRunAtUnion_KIND_VALUE, + Value: lastRunAt, + } + } + + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLSyncUpdate{ + GraphqlId: event.GraphQL.GetGraphqlId(), + Name: &name, + Url: &url, + Query: &query, + Variables: &variables, + 
LastRunAt: lastRunAtUnion, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLSync_ValueUnion{ + Kind: graphqlv1.GraphQLSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLSyncDelete{GraphqlId: event.GraphQL.GetGraphqlId()}, + } + } + + return &graphqlv1.GraphQLSyncResponse{ + Items: []*graphqlv1.GraphQLSync{{Value: value}}, + } +} + +func graphqlHeaderSyncResponseFrom(event GraphQLHeaderEvent) *graphqlv1.GraphQLHeaderSyncResponse { + var value *graphqlv1.GraphQLHeaderSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + key := event.GraphQLHeader.GetKey() + val := event.GraphQLHeader.GetValue() + enabled := event.GraphQLHeader.GetEnabled() + description := event.GraphQLHeader.GetDescription() + order := event.GraphQLHeader.GetOrder() + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLHeaderSyncInsert{ + GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId(), + GraphqlId: event.GraphQLHeader.GetGraphqlId(), + Key: key, + Value: val, + Enabled: enabled, + Description: description, + Order: order, + }, + } + case eventTypeUpdate: + key := event.GraphQLHeader.GetKey() + val := event.GraphQLHeader.GetValue() + enabled := event.GraphQLHeader.GetEnabled() + description := event.GraphQLHeader.GetDescription() + order := event.GraphQLHeader.GetOrder() + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLHeaderSyncUpdate{ + GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId(), + Key: &key, + Value: &val, + Enabled: &enabled, + Description: &description, + Order: &order, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLHeaderSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLHeaderSyncDelete{GraphqlHeaderId: event.GraphQLHeader.GetGraphqlHeaderId()}, + } + } + + return 
&graphqlv1.GraphQLHeaderSyncResponse{ + Items: []*graphqlv1.GraphQLHeaderSync{{Value: value}}, + } +} + +func graphqlResponseSyncResponseFrom(event GraphQLResponseEvent) *graphqlv1.GraphQLResponseSyncResponse { + var value *graphqlv1.GraphQLResponseSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + status := event.GraphQLResponse.GetStatus() + body := event.GraphQLResponse.GetBody() + t := event.GraphQLResponse.GetTime() + duration := event.GraphQLResponse.GetDuration() + size := event.GraphQLResponse.GetSize() + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseSyncInsert{ + GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId(), + GraphqlId: event.GraphQLResponse.GetGraphqlId(), + Status: status, + Body: body, + Time: t, + Duration: duration, + Size: size, + }, + } + case eventTypeUpdate: + status := event.GraphQLResponse.GetStatus() + body := event.GraphQLResponse.GetBody() + t := event.GraphQLResponse.GetTime() + duration := event.GraphQLResponse.GetDuration() + size := event.GraphQLResponse.GetSize() + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseSyncUpdate{ + GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId(), + Status: &status, + Body: &body, + Time: t, + Duration: &duration, + Size: &size, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseSyncDelete{GraphqlResponseId: event.GraphQLResponse.GetGraphqlResponseId()}, + } + } + + return &graphqlv1.GraphQLResponseSyncResponse{ + Items: []*graphqlv1.GraphQLResponseSync{{Value: value}}, + } +} + +func graphqlResponseHeaderSyncResponseFrom(event GraphQLResponseHeaderEvent) *graphqlv1.GraphQLResponseHeaderSyncResponse { + var value 
*graphqlv1.GraphQLResponseHeaderSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + key := event.GraphQLResponseHeader.GetKey() + val := event.GraphQLResponseHeader.GetValue() + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseHeaderSyncInsert{ + GraphqlResponseHeaderId: event.GraphQLResponseHeader.GetGraphqlResponseHeaderId(), + GraphqlResponseId: event.GraphQLResponseHeader.GetGraphqlResponseId(), + Key: key, + Value: val, + }, + } + case eventTypeUpdate: + key := event.GraphQLResponseHeader.GetKey() + val := event.GraphQLResponseHeader.GetValue() + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseHeaderSyncUpdate{ + GraphqlResponseHeaderId: event.GraphQLResponseHeader.GetGraphqlResponseHeaderId(), + Key: &key, + Value: &val, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseHeaderSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseHeaderSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseHeaderSyncDelete{GraphqlResponseHeaderId: event.GraphQLResponseHeader.GetGraphqlResponseHeaderId()}, + } + } + + return &graphqlv1.GraphQLResponseHeaderSyncResponse{ + Items: []*graphqlv1.GraphQLResponseHeaderSync{{Value: value}}, + } +} + +// graphqlDeltaSyncResponseFrom converts GraphQLEvent to GraphQLDeltaSync response +// TODO: Implement delta sync converter once delta event publishing is implemented +func graphqlDeltaSyncResponseFrom(event GraphQLEvent) *graphqlv1.GraphQLDeltaSyncResponse { + // For now, return nil as delta sync is not fully implemented + // Delta CRUD operations work, but real-time sync needs separate event streams + return nil +} + +// graphqlAssertSyncResponseFrom converts GraphQLAssertEvent to GraphQLAssertSync response +func graphqlAssertSyncResponseFrom(event GraphQLAssertEvent) 
*graphqlv1.GraphQLAssertSyncResponse { + var value *graphqlv1.GraphQLAssertSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLAssertSyncInsert{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + GraphqlId: event.GraphQLAssert.GetGraphqlId(), + Value: event.GraphQLAssert.GetValue(), + Enabled: event.GraphQLAssert.GetEnabled(), + Order: event.GraphQLAssert.GetOrder(), + }, + } + case eventTypeUpdate: + value_ := event.GraphQLAssert.GetValue() + enabled := event.GraphQLAssert.GetEnabled() + order := event.GraphQLAssert.GetOrder() + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLAssertSyncUpdate{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + Value: &value_, + Enabled: &enabled, + Order: &order, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLAssertSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLAssertSyncDelete{ + GraphqlAssertId: event.GraphQLAssert.GetGraphqlAssertId(), + }, + } + } + + return &graphqlv1.GraphQLAssertSyncResponse{ + Items: []*graphqlv1.GraphQLAssertSync{ + { + Value: value, + }, + }, + } +} + +// graphqlResponseAssertSyncResponseFrom converts GraphQLResponseAssertEvent to GraphQLResponseAssertSync response +func graphqlResponseAssertSyncResponseFrom(event GraphQLResponseAssertEvent) *graphqlv1.GraphQLResponseAssertSyncResponse { + var value *graphqlv1.GraphQLResponseAssertSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value_ := event.GraphQLResponseAssert.GetValue() + success := event.GraphQLResponseAssert.GetSuccess() + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLResponseAssertSyncInsert{ 
+ GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + GraphqlResponseId: event.GraphQLResponseAssert.GetGraphqlResponseId(), + Value: value_, + Success: success, + }, + } + case eventTypeUpdate: + value_ := event.GraphQLResponseAssert.GetValue() + success := event.GraphQLResponseAssert.GetSuccess() + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLResponseAssertSyncUpdate{ + GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + Value: &value_, + Success: &success, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLResponseAssertSync_ValueUnion{ + Kind: graphqlv1.GraphQLResponseAssertSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLResponseAssertSyncDelete{ + GraphqlResponseAssertId: event.GraphQLResponseAssert.GetGraphqlResponseAssertId(), + }, + } + } + + return &graphqlv1.GraphQLResponseAssertSyncResponse{ + Items: []*graphqlv1.GraphQLResponseAssertSync{ + { + Value: value, + }, + }, + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud.go b/packages/server/internal/api/rgraphql/rgraphql_crud.go new file mode 100644 index 000000000..d4507eff4 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud.go @@ -0,0 +1,244 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s 
*GraphQLServiceRPC) GraphQLCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQL + for _, ws := range workspaces { + items, err := s.graphqlService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, item := range items { + allItems = append(allItems, ToAPIGraphQL(item)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // FETCH + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + if len(workspaces) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("user has no workspaces")) + } + + defaultWorkspaceID := workspaces[0].ID + + // CHECK + if err := s.checkWorkspaceWriteAccess(ctx, defaultWorkspaceID); err != nil { + return nil, err + } + + // Parse items before starting transaction + items := make([]mutation.GraphQLInsertItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, 
errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + items = append(items, mutation.GraphQLInsertItem{ + GraphQL: &mgraphql.GraphQL{ + ID: gqlID, + WorkspaceID: defaultWorkspaceID, + Name: item.Name, + Url: item.Url, + Query: item.Query, + Variables: item.Variables, + }, + WorkspaceID: defaultWorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.InsertGraphQLBatch(ctx, items); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + // FETCH + CHECK: parse items, read existing records, check permissions + updateItems := make([]mutation.GraphQLUpdateItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existing, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := 
s.checkWorkspaceWriteAccess(ctx, existing.WorkspaceID); err != nil { + return nil, err + } + + if item.Name != nil { + existing.Name = *item.Name + } + if item.Url != nil { + existing.Url = *item.Url + } + if item.Query != nil { + existing.Query = *item.Query + } + if item.Variables != nil { + existing.Variables = *item.Variables + } + + updateItems = append(updateItems, mutation.GraphQLUpdateItem{ + GraphQL: existing, + WorkspaceID: existing.WorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.UpdateGraphQLBatch(ctx, updateItems); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + // FETCH + CHECK: parse items, read existing records, check permissions + deleteItems := make([]mutation.GraphQLDeleteItem, 0, len(req.Msg.Items)) + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existing, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } 
+ + if err := s.checkWorkspaceDeleteAccess(ctx, existing.WorkspaceID); err != nil { + return nil, err + } + + deleteItems = append(deleteItems, mutation.GraphQLDeleteItem{ + ID: gqlID, + WorkspaceID: existing.WorkspaceID, + }) + } + + // ACT + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + if err := mut.DeleteGraphQLBatch(ctx, deleteItems); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// getGraphQLsWithDeltasForWorkspace returns both base and delta GraphQL entries for a workspace. +func (s *GraphQLServiceRPC) getGraphQLsWithDeltasForWorkspace(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + graphqlList, err := s.graphqlReader.GetByWorkspaceID(ctx, workspaceID) + if err != nil { + return nil, err + } + deltaList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + return nil, err + } + all := make([]mgraphql.GraphQL, 0, len(graphqlList)+len(deltaList)) + return append(append(all, graphqlList...), deltaList...), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go b/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go new file mode 100644 index 000000000..746bf4097 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_assert.go @@ -0,0 +1,857 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "errors" + "sync" + "time" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + 
"github.com/the-dev-tools/dev-tools/packages/server/internal/converter" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/patch" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLAssert CRUD operations + +func (s *GraphQLServiceRPC) GraphQLAssertCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLAssertCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allAsserts []*graphqlv1.GraphQLAssert + for _, workspace := range workspaces { + allGraphQLs, err := s.getGraphQLsWithDeltasForWorkspace(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + for _, graphql := range allGraphQLs { + asserts, err := s.graphqlAssertService.GetByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, assert := range asserts { + allAsserts = append(allAsserts, converter.ToAPIGraphQLAssert(assert)) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLAssertCollectionResponse{Items: allAsserts}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, 
connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type insertItem struct { + assertID idwrap.IDWrap + graphqlID idwrap.IDWrap + value string + enabled bool + order float32 + workspaceID idwrap.IDWrap + } + insertData := make([]insertItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + insertData = append(insertData, insertItem{ + assertID: assertID, + graphqlID: graphqlID, + value: item.Value, + enabled: item.Enabled, + order: item.Order, + workspaceID: graphqlEntry.WorkspaceID, + }) + } + + // ACT: Insert asserts using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := 
time.Now().UnixMilli() + for _, data := range insertData { + assert := mgraphql.GraphQLAssert{ + ID: data.assertID, + GraphQLID: data.graphqlID, + Value: data.value, + Enabled: data.enabled, + Description: "", + DisplayOrder: data.order, + } + + if err := mut.InsertGraphQLAssert(ctx, mutation.GraphQLAssertInsertItem{ + ID: data.assertID, + GraphQLID: data.graphqlID, + WorkspaceID: data.workspaceID, + IsDelta: false, + Params: gen.CreateGraphQLAssertParams{ + ID: data.assertID.Bytes(), + GraphqlID: data.graphqlID.Bytes(), + Value: data.value, + Enabled: data.enabled, + Description: "", + DisplayOrder: float64(data.order), + IsDelta: false, + CreatedAt: now, + UpdatedAt: now, + }, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpInsert, + ID: data.assertID, + ParentID: data.graphqlID, + WorkspaceID: data.workspaceID, + Payload: assert, + }) + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! 
+ return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Process request data and perform all reads/checks OUTSIDE transaction + type updateItem struct { + existingAssert mgraphql.GraphQLAssert + value *string + enabled *bool + order *float32 + workspaceID idwrap.IDWrap + } + updateData := make([]updateItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, assertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + updateData = append(updateData, updateItem{ + existingAssert: *existingAssert, + value: 
item.Value, + enabled: item.Enabled, + order: item.Order, + workspaceID: graphqlEntry.WorkspaceID, + }) + } + + // ACT: Update asserts using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := time.Now().UnixMilli() + for _, data := range updateData { + assert := data.existingAssert + + // Build patch with only changed fields + assertPatch := patch.GraphQLAssertPatch{} + + // Update fields if provided and track in patch + if data.value != nil { + assert.Value = *data.value + assertPatch.Value = patch.NewOptional(*data.value) + } + if data.enabled != nil { + assert.Enabled = *data.enabled + assertPatch.Enabled = patch.NewOptional(*data.enabled) + } + if data.order != nil { + assert.DisplayOrder = *data.order + assertPatch.Order = patch.NewOptional(*data.order) + } + + if err := mut.UpdateGraphQLAssert(ctx, mutation.GraphQLAssertUpdateItem{ + ID: assert.ID, + GraphQLID: assert.GraphQLID, + WorkspaceID: data.workspaceID, + IsDelta: assert.IsDelta, + Params: gen.UpdateGraphQLAssertParams{ + ID: assert.ID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + UpdatedAt: now, + }, + Patch: assertPatch, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! 
+ return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type deleteItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + } + deleteItems := make([]deleteItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + assertID, err := idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, assertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify the GraphQL entry exists and user has access - use pool service + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate delete access to the workspace + if err := s.checkWorkspaceDeleteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + deleteItems = append(deleteItems, deleteItem{ + ID: assertID, + GraphQLID: existingAssert.GraphQLID, + WorkspaceID: 
graphqlEntry.WorkspaceID, + IsDelta: existingAssert.IsDelta, + }) + } + + // ACT: Delete using mutation context with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, item := range deleteItems { + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpDelete, + ID: item.ID, + ParentID: item.GraphQLID, + WorkspaceID: item.WorkspaceID, + IsDelta: item.IsDelta, + }) + if err := mut.Queries().DeleteGraphQLAssert(ctx, item.ID.Bytes()); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLAssertSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLAssertSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLAssertSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLAssertSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLAssertTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLAssertEvent) *graphqlv1.GraphQLAssertSyncResponse { + var items []*graphqlv1.GraphQLAssertSync + for _, event := range events { + // Skip delta 
asserts (they have separate sync) + if event.IsDelta { + continue + } + if resp := graphqlAssertSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLAssertSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLAssert, + filter, + converter, + send, + nil, + ) +} + +// Delta operations +func (s *GraphQLServiceRPC) GraphQLAssertDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLAssertDeltaCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLAssertDelta + for _, workspace := range workspaces { + // Get GraphQL delta entries for this workspace + graphqlList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Get asserts for each GraphQL entry + for _, graphql := range graphqlList { + asserts, err := s.graphqlAssertService.GetByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to delta format + for _, assert := range asserts { + if !assert.IsDelta { + continue + } + + delta := &graphqlv1.GraphQLAssertDelta{ + DeltaGraphqlAssertId: assert.ID.Bytes(), + GraphqlId: assert.GraphQLID.Bytes(), + } + + if assert.ParentGraphQLAssertID != nil { + delta.GraphqlAssertId = assert.ParentGraphQLAssertID.Bytes() + } + + // Only include delta fields if they exist + if assert.DeltaValue != nil { + delta.Value = assert.DeltaValue + } + if assert.DeltaEnabled != nil { + 
delta.Enabled = assert.DeltaEnabled + } + if assert.DeltaDisplayOrder != nil { + delta.Order = assert.DeltaDisplayOrder + } + + allDeltas = append(allDeltas, delta) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLAssertDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is required")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type insertItem struct { + graphqlID idwrap.IDWrap + newID idwrap.IDWrap + parentID idwrap.IDWrap + workspaceID idwrap.IDWrap + baseAssert mgraphql.GraphQLAssert + item *graphqlv1.GraphQLAssertDeltaInsert + } + insertData := make([]insertItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlEntry, err := s.graphqlReader.Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if !graphqlEntry.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + if len(item.GraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_assert_id is required")) + } + + parentAssertID, err := 
idwrap.NewFromBytes(item.GraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + baseAssert, err := s.graphqlAssertService.GetByID(ctx, parentAssertID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + newID := idwrap.NewNow() + if len(item.DeltaGraphqlAssertId) > 0 { + newID, err = idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } + + insertData = append(insertData, insertItem{ + graphqlID: graphqlID, + newID: newID, + parentID: parentAssertID, + workspaceID: graphqlEntry.WorkspaceID, + baseAssert: *baseAssert, + item: item, + }) + } + + // ACT: Insert new delta records using mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + now := time.Now().UnixMilli() + for _, data := range insertData { + params := gen.CreateGraphQLAssertParams{ + ID: data.newID.Bytes(), + GraphqlID: data.graphqlID.Bytes(), + Value: data.baseAssert.Value, + Enabled: data.baseAssert.Enabled, + Description: data.baseAssert.Description, + DisplayOrder: float64(data.baseAssert.DisplayOrder), + ParentGraphqlAssertID: data.parentID.Bytes(), + IsDelta: true, + DeltaValue: stringPtrToNullString(data.item.Value), + DeltaEnabled: boolPtrToNullBool(data.item.Enabled), + DeltaDescription: stringPtrToNullString(nil), + DeltaDisplayOrder: float32PtrToNullFloat64(data.item.Order), + CreatedAt: now, + UpdatedAt: now, + } + + if err := mut.InsertGraphQLAssert(ctx, mutation.GraphQLAssertInsertItem{ + ID: data.newID, + GraphQLID: data.graphqlID, + WorkspaceID: data.workspaceID, + IsDelta: true, + Params: params, + }); err != nil { + return nil, 
connect.NewError(connect.CodeInternal, err) + } + + assertService := s.graphqlAssertService.TX(mut.TX()) + updated, err := assertService.GetByID(ctx, data.newID) + if err == nil { + mut.UpdateLastEventPayload(*updated) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert delta must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type updateItem struct { + deltaID idwrap.IDWrap + existingAssert mgraphql.GraphQLAssert + workspaceID idwrap.IDWrap + item *graphqlv1.GraphQLAssertDeltaUpdate + } + updateData := make([]updateItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_assert_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta assert - use pool service + existingAssert, err := s.graphqlAssertService.GetByID(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingAssert.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL assert is not a delta")) + } + + // Get the GraphQL entry to check workspace access - use pool service + graphqlEntry, err := 
s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // CHECK: Validate write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + updateData = append(updateData, updateItem{ + deltaID: deltaID, + existingAssert: *existingAssert, + workspaceID: graphqlEntry.WorkspaceID, + item: item, + }) + } + + // ACT: Update using mutation context + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range updateData { + item := data.item + deltaValue := data.existingAssert.DeltaValue + deltaEnabled := data.existingAssert.DeltaEnabled + deltaOrder := data.existingAssert.DeltaDisplayOrder + var patchData patch.GraphQLAssertPatch + + if item.Value != nil { + switch item.Value.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_ValueUnion_KIND_UNSET: + deltaValue = nil + patchData.Value = patch.Unset[string]() + case graphqlv1.GraphQLAssertDeltaUpdate_ValueUnion_KIND_VALUE: + valueStr := item.Value.GetValue() + deltaValue = &valueStr + patchData.Value = patch.NewOptional(valueStr) + } + } + if item.Enabled != nil { + switch item.Enabled.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_EnabledUnion_KIND_UNSET: + deltaEnabled = nil + patchData.Enabled = patch.Unset[bool]() + case graphqlv1.GraphQLAssertDeltaUpdate_EnabledUnion_KIND_VALUE: + enabledBool := item.Enabled.GetValue() + deltaEnabled = &enabledBool + patchData.Enabled = patch.NewOptional(enabledBool) + } + } + if item.Order != nil { + switch item.Order.GetKind() { + case graphqlv1.GraphQLAssertDeltaUpdate_OrderUnion_KIND_UNSET: + deltaOrder = nil + patchData.Order = patch.Unset[float32]() + case graphqlv1.GraphQLAssertDeltaUpdate_OrderUnion_KIND_VALUE: + orderFloat := item.Order.GetValue() + 
deltaOrder = &orderFloat + patchData.Order = patch.NewOptional(orderFloat) + } + } + + assertService := s.graphqlAssertService.TX(mut.TX()) + if err := mut.UpdateGraphQLAssertDelta(ctx, mutation.GraphQLAssertDeltaUpdateItem{ + ID: data.deltaID, + GraphQLID: data.existingAssert.GraphQLID, + WorkspaceID: data.workspaceID, + Params: gen.UpdateGraphQLAssertDeltaParams{ + ID: data.deltaID.Bytes(), + DeltaValue: stringPtrToNullString(deltaValue), + DeltaEnabled: boolPtrToNullBool(deltaEnabled), + DeltaDisplayOrder: float32PtrToNullFloat64(deltaOrder), + UpdatedAt: time.Now().UnixMilli(), + }, + Patch: patchData, + }); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Update payload in tracked event + updated, err := assertService.GetByID(ctx, data.deltaID) + if err == nil { + mut.UpdateLastEventPayload(*updated) + } + } + + if err := mut.Commit(ctx); err != nil { // Auto-publishes events! + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLAssertDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL assert delta must be provided")) + } + + // FETCH: Gather data and check permissions OUTSIDE transaction + type deleteItem struct { + deltaID idwrap.IDWrap + graphqlID idwrap.IDWrap + workspaceID idwrap.IDWrap + assert mgraphql.GraphQLAssert + } + deleteData := make([]deleteItem, 0, len(req.Msg.Items)) + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlAssertId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_assert_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlAssertId) + if err != nil { + return nil, 
connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta assert + existingAssert, err := s.graphqlAssertService.GetByID(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLAssertFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingAssert.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL assert is not a delta")) + } + + // Get the GraphQL entry to check workspace access + graphqlEntry, err := s.graphqlReader.Get(ctx, existingAssert.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Check delete access to the workspace + if err := s.checkWorkspaceDeleteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + deleteData = append(deleteData, deleteItem{ + deltaID: deltaID, + graphqlID: existingAssert.GraphQLID, + workspaceID: graphqlEntry.WorkspaceID, + assert: *existingAssert, + }) + } + + // ACT: Execute deletes in transaction + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer mut.Rollback() + + for _, data := range deleteData { + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpDelete, + ID: data.deltaID, + ParentID: data.graphqlID, + WorkspaceID: data.workspaceID, + IsDelta: true, + Payload: data.assert, + }) + if err := mut.Queries().DeleteGraphQLAssert(ctx, data.deltaID.Bytes()); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLAssertDeltaSync(ctx context.Context, req 
*connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLAssertDeltaSyncResponse]) error { + // TODO: Implement streaming delta sync + return nil +} + +// Helper functions for null conversions +func stringPtrToNullString(s *string) sql.NullString { + if s == nil { + return sql.NullString{Valid: false} + } + return sql.NullString{String: *s, Valid: true} +} + +func boolPtrToNullBool(b *bool) sql.NullBool { + if b == nil { + return sql.NullBool{Valid: false} + } + return sql.NullBool{Bool: *b, Valid: true} +} + +func float32PtrToNullFloat64(f *float32) sql.NullFloat64 { + if f == nil { + return sql.NullFloat64{Valid: false} + } + return sql.NullFloat64{Float64: float64(*f), Valid: true} +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go b/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go new file mode 100644 index 000000000..d2b7e4ab9 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_delta.go @@ -0,0 +1,355 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLDeltaCollection fetches all delta GraphQL entries for the user's workspaces +func (s *GraphQLServiceRPC) GraphQLDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLDeltaCollectionResponse], error) { + userID, err 
:= mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLDelta + for _, workspace := range workspaces { + // Get GraphQL delta entries for this workspace + graphqlList, err := s.graphqlService.Reader().GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to delta format + for _, gql := range graphqlList { + delta := &graphqlv1.GraphQLDelta{ + DeltaGraphqlId: gql.ID.Bytes(), + } + + if gql.ParentGraphQLID != nil { + delta.GraphqlId = gql.ParentGraphQLID.Bytes() + } + + // Only include delta fields if they exist + if gql.DeltaName != nil { + delta.Name = gql.DeltaName + } + if gql.DeltaUrl != nil { + delta.Url = gql.DeltaUrl + } + if gql.DeltaQuery != nil { + delta.Query = gql.DeltaQuery + } + if gql.DeltaVariables != nil { + delta.Variables = gql.DeltaVariables + } + + allDeltas = append(allDeltas, delta) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +// GraphQLDeltaInsert creates new delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is required")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, 
connect.NewError(connect.CodeInvalidArgument, err) + } + + // Check workspace write access + graphqlEntry, err := s.graphqlService.Reader().Get(ctx, graphqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, graphqlEntry.WorkspaceID); err != nil { + return nil, err + } + + var deltaID idwrap.IDWrap + if len(item.DeltaGraphqlId) > 0 { + var err error + deltaID, err = idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } else { + deltaID = idwrap.NewNow() + } + + // Create delta GraphQL entry + deltaGraphQL := &mgraphql.GraphQL{ + ID: deltaID, + WorkspaceID: graphqlEntry.WorkspaceID, + FolderID: graphqlEntry.FolderID, + Name: graphqlEntry.Name, + Url: graphqlEntry.Url, + Query: graphqlEntry.Query, + Variables: graphqlEntry.Variables, + Description: graphqlEntry.Description, + ParentGraphQLID: &graphqlID, + IsDelta: true, + DeltaName: item.Name, + DeltaUrl: item.Url, + DeltaQuery: item.Query, + DeltaVariables: item.Variables, + CreatedAt: 0, // Will be set by service + UpdatedAt: 0, // Will be set by service + } + + // Use mutation pattern for create with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.graphqlService.TX(mut.TX()).Create(ctx, deltaGraphQL) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaUpdate updates existing delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaUpdate(ctx context.Context, req 
*connect.Request[graphqlv1.GraphQLDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL entry + existingDelta, err := s.graphqlService.Reader().Get(ctx, deltaID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + // Check write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, existingDelta.WorkspaceID); err != nil { + return nil, err + } + + // Apply updates + if item.Name != nil { + switch item.Name.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_NameUnion_KIND_UNSET: + existingDelta.DeltaName = nil + case graphqlv1.GraphQLDeltaUpdate_NameUnion_KIND_VALUE: + nameStr := item.Name.GetValue() + existingDelta.DeltaName = &nameStr + } + } + if item.Url != nil { + switch item.Url.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_UrlUnion_KIND_UNSET: + existingDelta.DeltaUrl = nil + case graphqlv1.GraphQLDeltaUpdate_UrlUnion_KIND_VALUE: + urlStr := item.Url.GetValue() + existingDelta.DeltaUrl = &urlStr + } + } + if item.Query != nil { + switch item.Query.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_QueryUnion_KIND_UNSET: + existingDelta.DeltaQuery = nil + case 
graphqlv1.GraphQLDeltaUpdate_QueryUnion_KIND_VALUE: + queryStr := item.Query.GetValue() + existingDelta.DeltaQuery = &queryStr + } + } + if item.Variables != nil { + switch item.Variables.GetKind() { + case graphqlv1.GraphQLDeltaUpdate_VariablesUnion_KIND_UNSET: + existingDelta.DeltaVariables = nil + case graphqlv1.GraphQLDeltaUpdate_VariablesUnion_KIND_VALUE: + variablesStr := item.Variables.GetValue() + existingDelta.DeltaVariables = &variablesStr + } + } + + // Use mutation pattern for update with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.graphqlService.TX(mut.TX()).Update(ctx, existingDelta); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaDelete deletes delta GraphQL entries +func (s *GraphQLServiceRPC) GraphQLDeltaDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL delta must be provided")) + } + + // Step 1: Gather data and check permissions OUTSIDE transaction + var deleteData []struct { + deltaID idwrap.IDWrap + existingDelta *mgraphql.GraphQL + } + + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL entry + existingDelta, err := s.graphqlService.Reader().Get(ctx, deltaID) + 
if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL entry is not a delta")) + } + + // Check write access to the workspace + if err := s.checkWorkspaceWriteAccess(ctx, existingDelta.WorkspaceID); err != nil { + return nil, err + } + + deleteData = append(deleteData, struct { + deltaID idwrap.IDWrap + existingDelta *mgraphql.GraphQL + }{ + deltaID: deltaID, + existingDelta: existingDelta, + }) + } + + // Step 2: Execute deletes in transaction using mutation pattern + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + for _, data := range deleteData { + if err := s.graphqlService.TX(mut.TX()).Delete(ctx, data.deltaID); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLDeltaSync streams delta GraphQL changes in real-time +func (s *GraphQLServiceRPC) GraphQLDeltaSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLDeltaSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + return s.streamGraphQLDeltaSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLDeltaSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLDeltaSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLTopic) bool { + if _, ok := 
workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLEvent) *graphqlv1.GraphQLDeltaSyncResponse { + var items []*graphqlv1.GraphQLDeltaSync + for _, event := range events { + if resp := graphqlDeltaSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLDeltaSyncResponse{Items: items} + } + + return eventstream.StreamToClient(ctx, s.streamers.GraphQL, filter, converter, send, nil) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_header.go b/packages/server/internal/api/rgraphql/rgraphql_crud_header.go new file mode 100644 index 000000000..557a3f47e --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_header.go @@ -0,0 +1,212 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s *GraphQLServiceRPC) GraphQLHeaderCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLHeaderCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := 
s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLHeader + for _, ws := range workspaces { + gqlList, err := s.graphqlService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, gql := range gqlList { + headers, err := s.headerService.GetByGraphQLID(ctx, gql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, h := range headers { + allItems = append(allItems, ToAPIGraphQLHeader(h)) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLHeaderCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + gqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + 
return nil, err + } + + header := &mgraphql.GraphQLHeader{ + ID: headerID, + GraphQLID: gqlID, + Key: item.Key, + Value: item.Value, + Enabled: item.Enabled, + Description: item.Description, + DisplayOrder: item.Order, + } + + if err := s.headerService.Create(ctx, header); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, GraphQLHeaderEvent{ + Type: eventTypeInsert, + GraphQLHeader: ToAPIGraphQLHeader(*header), + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existingHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(existingHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("header not found")) + } + existing := existingHeaders[0] + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, existing.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + if item.Key != nil { + existing.Key = *item.Key + } + if item.Value != nil { + existing.Value = *item.Value + } + if item.Enabled != nil { + existing.Enabled = *item.Enabled + } + if 
item.Description != nil { + existing.Description = *item.Description + } + if item.Order != nil { + existing.DisplayOrder = *item.Order + } + + if err := s.headerService.Update(ctx, &existing); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, GraphQLHeaderEvent{ + Type: eventTypeUpdate, + GraphQLHeader: ToAPIGraphQLHeader(existing), + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLHeaderDelete(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one item must be provided")) + } + + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + existingHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(existingHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("header not found")) + } + existing := existingHeaders[0] + + workspaceID, err := s.graphqlService.GetWorkspaceID(ctx, existing.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceDeleteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + if err := s.headerService.Delete(ctx, headerID); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if s.streamers.GraphQLHeader != nil { + s.streamers.GraphQLHeader.Publish(GraphQLHeaderTopic{WorkspaceID: workspaceID}, 
GraphQLHeaderEvent{ + Type: eventTypeDelete, + GraphQLHeader: &graphqlv1.GraphQLHeader{GraphqlHeaderId: headerID.Bytes(), GraphqlId: existing.GraphQLID.Bytes()}, + }) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go b/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go new file mode 100644 index 000000000..5789f81c3 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_header_delta.go @@ -0,0 +1,343 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLHeaderDeltaCollection fetches all delta GraphQL headers for the user's workspaces +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLHeaderDeltaCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allDeltas []*graphqlv1.GraphQLHeaderDelta + for _, workspace := range workspaces { + // Get GraphQL header delta entries for this workspace + headerList, err := s.headerService.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + 
+ // Convert to delta format + for _, header := range headerList { + delta := &graphqlv1.GraphQLHeaderDelta{ + DeltaGraphqlHeaderId: header.ID.Bytes(), + GraphqlId: header.GraphQLID.Bytes(), + } + + if header.ParentGraphQLHeaderID != nil { + delta.GraphqlHeaderId = header.ParentGraphQLHeaderID.Bytes() + } + + // Only include delta fields if they exist + if header.DeltaKey != nil { + delta.Key = header.DeltaKey + } + if header.DeltaValue != nil { + delta.Value = header.DeltaValue + } + if header.DeltaEnabled != nil { + delta.Enabled = header.DeltaEnabled + } + if header.DeltaDescription != nil { + delta.Description = header.DeltaDescription + } + if header.DeltaDisplayOrder != nil { + delta.Order = header.DeltaDisplayOrder + } + + allDeltas = append(allDeltas, delta) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLHeaderDeltaCollectionResponse{ + Items: allDeltas, + }), nil +} + +// GraphQLHeaderDeltaInsert creates new delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaInsert(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeltaInsertRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.Items) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one delta item is required")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.GraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_header_id is required for each delta item")) + } + if len(item.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required for each delta item")) + } + + headerID, err := idwrap.NewFromBytes(item.GraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + graphqlID, err := idwrap.NewFromBytes(item.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get 
parent header to copy base values + parentHeaders, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{headerID}) + if err != nil || len(parentHeaders) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("parent header not found")) + } + parentHeader := parentHeaders[0] + + // Check workspace write access through the GraphQL entry + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + var deltaID idwrap.IDWrap + if len(item.DeltaGraphqlHeaderId) > 0 { + var err error + deltaID, err = idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + } else { + deltaID = idwrap.NewNow() + } + + // Create delta GraphQL header entry + deltaHeader := &mgraphql.GraphQLHeader{ + ID: deltaID, + GraphQLID: graphqlID, + Key: parentHeader.Key, + Value: parentHeader.Value, + Enabled: parentHeader.Enabled, + Description: parentHeader.Description, + DisplayOrder: parentHeader.DisplayOrder, + ParentGraphQLHeaderID: &headerID, + IsDelta: true, + DeltaKey: item.Key, + DeltaValue: item.Value, + DeltaEnabled: item.Enabled, + DeltaDescription: item.Description, + DeltaDisplayOrder: item.Order, + CreatedAt: 0, // Will be set by service + UpdatedAt: 0, // Will be set by service + } + + // Use mutation pattern for create with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Create(ctx, deltaHeader) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return 
connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaUpdate updates existing delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaUpdate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLHeaderDeltaUpdateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL header delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_header_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL header entry + existingDeltas, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{deltaID}) + if err != nil || len(existingDeltas) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("delta header not found")) + } + existingDelta := existingDeltas[0] + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL header entry is not a delta")) + } + + // Check write access to the workspace + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, existingDelta.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + // Apply updates to delta fields + if item.Key != nil { + switch item.Key.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_KeyUnion_KIND_UNSET: + existingDelta.DeltaKey = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_KeyUnion_KIND_VALUE: + keyStr := item.Key.GetValue() + existingDelta.DeltaKey = &keyStr + } + } + + 
if item.Value != nil { + switch item.Value.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_ValueUnion_KIND_UNSET: + existingDelta.DeltaValue = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_ValueUnion_KIND_VALUE: + valueStr := item.Value.GetValue() + existingDelta.DeltaValue = &valueStr + } + } + + if item.Enabled != nil { + switch item.Enabled.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_EnabledUnion_KIND_UNSET: + existingDelta.DeltaEnabled = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_EnabledUnion_KIND_VALUE: + enabledVal := item.Enabled.GetValue() + existingDelta.DeltaEnabled = &enabledVal + } + } + + if item.Description != nil { + switch item.Description.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_DescriptionUnion_KIND_UNSET: + existingDelta.DeltaDescription = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_DescriptionUnion_KIND_VALUE: + descStr := item.Description.GetValue() + existingDelta.DeltaDescription = &descStr + } + } + + if item.Order != nil { + switch item.Order.GetKind() { + case graphqlv1.GraphQLHeaderDeltaUpdate_OrderUnion_KIND_UNSET: + existingDelta.DeltaDisplayOrder = nil + case graphqlv1.GraphQLHeaderDeltaUpdate_OrderUnion_KIND_VALUE: + orderVal := item.Order.GetValue() + existingDelta.DeltaDisplayOrder = &orderVal + } + } + + // Use mutation pattern for update with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Update(ctx, &existingDelta) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaDelete deletes delta GraphQL header entries +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaDelete(ctx context.Context, req 
*connect.Request[graphqlv1.GraphQLHeaderDeltaDeleteRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GetItems()) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("at least one GraphQL header delta must be provided")) + } + + // Process each delta item + for _, item := range req.Msg.Items { + if len(item.DeltaGraphqlHeaderId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("delta_graphql_header_id is required")) + } + + deltaID, err := idwrap.NewFromBytes(item.DeltaGraphqlHeaderId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + // Get existing delta GraphQL header entry + existingDeltas, err := s.headerService.GetByIDs(ctx, []idwrap.IDWrap{deltaID}) + if err != nil || len(existingDeltas) == 0 { + return nil, connect.NewError(connect.CodeNotFound, errors.New("delta header not found")) + } + existingDelta := existingDeltas[0] + + // Verify this is actually a delta record + if !existingDelta.IsDelta { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("specified GraphQL header entry is not a delta")) + } + + // Check delete access to the workspace + workspaceID, err := s.graphqlService.Reader().GetWorkspaceID(ctx, existingDelta.GraphQLID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceDeleteAccess(ctx, workspaceID); err != nil { + return nil, err + } + + // Use mutation pattern for delete with auto-publish + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + err = s.headerService.TX(mut.TX()).Delete(ctx, deltaID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + return 
connect.NewResponse(&emptypb.Empty{}), nil +} + +// GraphQLHeaderDeltaSync streams delta header changes to the client +func (s *GraphQLServiceRPC) GraphQLHeaderDeltaSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLHeaderDeltaSyncResponse]) error { + // TODO: Implement streaming delta sync with proper event filtering + // Similar to GraphQLDeltaSync, this requires a delta-specific event stream + // that only publishes delta-related changes to prevent flooding clients + // with non-delta header updates. + return nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_response.go b/packages/server/internal/api/rgraphql/rgraphql_crud_response.go new file mode 100644 index 000000000..1ea2978c4 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_response.go @@ -0,0 +1,62 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +func (s *GraphQLServiceRPC) GraphQLResponseCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLResponse + for _, ws := range workspaces { + responses, err := s.responseService.GetByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, r := range responses { + allItems = append(allItems, 
ToAPIGraphQLResponse(r)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseCollectionResponse{Items: allItems}), nil +} + +func (s *GraphQLServiceRPC) GraphQLResponseHeaderCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseHeaderCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allItems []*graphqlv1.GraphQLResponseHeader + for _, ws := range workspaces { + headers, err := s.responseService.GetHeadersByWorkspaceID(ctx, ws.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + for _, h := range headers { + allItems = append(allItems, ToAPIGraphQLResponseHeader(h)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseHeaderCollectionResponse{Items: allItems}), nil +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go b/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go new file mode 100644 index 000000000..9bb0c0c3e --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_response_assert.go @@ -0,0 +1,91 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLResponseAssert operations + +func (s *GraphQLServiceRPC) GraphQLResponseAssertCollection(ctx context.Context, req 
*connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLResponseAssertCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.ws.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Collect all response asserts across user's workspaces + var allAsserts []*graphqlv1.GraphQLResponseAssert + for _, workspace := range workspaces { + asserts, err := s.responseService.GetAssertsByWorkspaceID(ctx, workspace.ID) + if err != nil { + continue + } + for _, assert := range asserts { + allAsserts = append(allAsserts, ToAPIGraphQLResponseAssert(assert)) + } + } + + return connect.NewResponse(&graphqlv1.GraphQLResponseAssertCollectionResponse{Items: allAsserts}), nil +} + +func (s *GraphQLServiceRPC) GraphQLResponseAssertSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLResponseAssertSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLResponseAssertSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLResponseAssertSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLResponseAssertSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLResponseAssertTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), struct{}{}) + return true + } + + converter := func(events []GraphQLResponseAssertEvent) *graphqlv1.GraphQLResponseAssertSyncResponse { + var items 
[]*graphqlv1.GraphQLResponseAssertSync + for _, event := range events { + if resp := graphqlResponseAssertSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLResponseAssertSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLResponseAssert, + filter, + converter, + send, + nil, + ) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_crud_version.go b/packages/server/internal/api/rgraphql/rgraphql_crud_version.go new file mode 100644 index 000000000..969967e54 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_crud_version.go @@ -0,0 +1,170 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "sync" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +// GraphQLVersion operations + +func (s *GraphQLServiceRPC) GraphQLVersionCollection(ctx context.Context, req *connect.Request[emptypb.Empty]) (*connect.Response[graphqlv1.GraphQLVersionCollectionResponse], error) { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Get user's workspaces + workspaces, err := s.wsReader.GetWorkspacesByUserIDOrdered(ctx, userID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + var allVersions []*graphqlv1.GraphQLVersion + for _, workspace := range workspaces { + // Get base GraphQL entries for this workspace + graphqlList, err := 
s.graphqlReader.GetByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Also get delta GraphQL entries (versions can be stored against delta IDs) + deltaList, err := s.graphqlReader.GetDeltasByWorkspaceID(ctx, workspace.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Combine base and delta entries + allGraphQLs := make([]mgraphql.GraphQL, 0, len(graphqlList)+len(deltaList)) + allGraphQLs = append(allGraphQLs, graphqlList...) + allGraphQLs = append(allGraphQLs, deltaList...) + + // Get versions for each GraphQL entry + for _, graphql := range allGraphQLs { + versions, err := s.graphqlReader.GetGraphQLVersionsByGraphQLID(ctx, graphql.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Convert to API format + for _, version := range versions { + apiVersion := ToAPIGraphQLVersion(version) + allVersions = append(allVersions, apiVersion) + } + } + } + + return connect.NewResponse(&graphqlv1.GraphQLVersionCollectionResponse{Items: allVersions}), nil +} + +func (s *GraphQLServiceRPC) GraphQLVersionSync(ctx context.Context, req *connect.Request[emptypb.Empty], stream *connect.ServerStream[graphqlv1.GraphQLVersionSyncResponse]) error { + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return connect.NewError(connect.CodeUnauthenticated, err) + } + + return s.streamGraphQLVersionSync(ctx, userID, stream.Send) +} + +func (s *GraphQLServiceRPC) streamGraphQLVersionSync(ctx context.Context, userID idwrap.IDWrap, send func(*graphqlv1.GraphQLVersionSyncResponse) error) error { + var workspaceSet sync.Map + + filter := func(topic GraphQLVersionTopic) bool { + if _, ok := workspaceSet.Load(topic.WorkspaceID.String()); ok { + return true + } + belongs, err := s.us.CheckUserBelongsToWorkspace(ctx, userID, topic.WorkspaceID) + if err != nil || !belongs { + return false + } + workspaceSet.Store(topic.WorkspaceID.String(), 
struct{}{}) + return true + } + + converter := func(events []GraphQLVersionEvent) *graphqlv1.GraphQLVersionSyncResponse { + var items []*graphqlv1.GraphQLVersionSync + for _, event := range events { + if resp := graphqlVersionSyncResponseFrom(event); resp != nil && len(resp.Items) > 0 { + items = append(items, resp.Items...) + } + } + if len(items) == 0 { + return nil + } + return &graphqlv1.GraphQLVersionSyncResponse{Items: items} + } + + return eventstream.StreamToClient( + ctx, + s.streamers.GraphQLVersion, + filter, + converter, + send, + nil, + ) +} + +// ToAPIGraphQLVersion converts model to API type +func ToAPIGraphQLVersion(version mgraphql.GraphQLVersion) *graphqlv1.GraphQLVersion{ + return &graphqlv1.GraphQLVersion{ + GraphqlVersionId: version.ID.Bytes(), + GraphqlId: version.GraphQLID.Bytes(), + Name: version.VersionName, + Description: version.VersionDescription, + CreatedAt: version.CreatedAt, + } +} + +// graphqlVersionSyncResponseFrom converts GraphQL version events to sync responses +func graphqlVersionSyncResponseFrom(event GraphQLVersionEvent) *graphqlv1.GraphQLVersionSyncResponse { + var value *graphqlv1.GraphQLVersionSync_ValueUnion + + switch event.Type { + case eventTypeInsert: + value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_INSERT, + Insert: &graphqlv1.GraphQLVersionSyncInsert{ + GraphqlVersionId: event.GraphQLVersion.GetGraphqlVersionId(), + GraphqlId: event.GraphQLVersion.GetGraphqlId(), + Name: event.GraphQLVersion.GetName(), + Description: event.GraphQLVersion.GetDescription(), + CreatedAt: event.GraphQLVersion.GetCreatedAt(), + }, + } + case eventTypeUpdate: + name := event.GraphQLVersion.GetName() + description := event.GraphQLVersion.GetDescription() + createdAt := event.GraphQLVersion.GetCreatedAt() + value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_UPDATE, + Update: &graphqlv1.GraphQLVersionSyncUpdate{ + GraphqlVersionId: 
event.GraphQLVersion.GetGraphqlVersionId(), + Name: &name, + Description: &description, + CreatedAt: &createdAt, + }, + } + case eventTypeDelete: + value = &graphqlv1.GraphQLVersionSync_ValueUnion{ + Kind: graphqlv1.GraphQLVersionSync_ValueUnion_KIND_DELETE, + Delete: &graphqlv1.GraphQLVersionSyncDelete{ + GraphqlVersionId: event.GraphQLVersion.GetGraphqlVersionId(), + }, + } + default: + return nil + } + + return &graphqlv1.GraphQLVersionSyncResponse{ + Items: []*graphqlv1.GraphQLVersionSync{{Value: value}}, + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go b/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go new file mode 100644 index 000000000..fc23dccd1 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_delta_converter.go @@ -0,0 +1,4 @@ +//nolint:revive // exported +package rgraphql + +// This file is deprecated. The graphqlDeltaSyncResponseFrom function is now in rgraphql_converter.go diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec.go b/packages/server/internal/api/rgraphql/rgraphql_exec.go new file mode 100644 index 000000000..4a77a6440 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec.go @@ -0,0 +1,788 @@ +//nolint:revive // exported +package rgraphql + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "strings" + "time" + + "connectrpc.com/connect" + "google.golang.org/protobuf/types/known/emptypb" + + devtoolsdb "github.com/the-dev-tools/dev-tools/packages/db" + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/internal/api/middleware/mwauth" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/mutation" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" + 
"github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" +) + +const introspectionQuery = `query IntrospectionQuery { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + description + locations + args { + ...InputValue + } + } + } +} + +fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } +} + +fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue +} + +fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } +}` + +func (s *GraphQLServiceRPC) GraphQLRun(ctx context.Context, req *connect.Request[graphqlv1.GraphQLRunRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) 
+ } + + if err := s.checkWorkspaceReadAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + // Get user ID for version creation + userID, err := mwauth.GetContextUserID(ctx) + if err != nil { + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + + // Build variable map from workspace env + varMap, err := s.buildWorkspaceVarMap(ctx, gqlEntry.WorkspaceID) + if err != nil { + varMap = make(map[string]any) + } + + // Resolve GraphQL request (handles both delta and non-delta) + var resolvedGraphQL mgraphql.GraphQL + var headers []mgraphql.GraphQLHeader + var asserts []mgraphql.GraphQLAssert + + if gqlEntry.IsDelta && gqlEntry.ParentGraphQLID != nil { + // Delta request: use resolver to merge base + delta + resolved, err := s.resolver.Resolve(ctx, *gqlEntry.ParentGraphQLID, &gqlEntry.ID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to resolve delta request: %w", err)) + } + resolvedGraphQL = resolved.Resolved + headers = resolved.ResolvedHeaders + asserts = resolved.ResolvedAsserts + + // Use workspace ID from original entry + resolvedGraphQL.WorkspaceID = gqlEntry.WorkspaceID + } else { + // Non-delta request: load components directly + resolvedGraphQL = *gqlEntry + + hdrs, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + hdrs = []mgraphql.GraphQLHeader{} + } + headers = hdrs + + assrts, err := s.graphqlAssertService.GetByGraphQLID(ctx, gqlID) + if err != nil { + assrts = []mgraphql.GraphQLAssert{} + } + asserts = assrts + } + + // Build and execute GraphQL request + httpReq, err := prepareGraphQLRequest(&resolvedGraphQL, headers, varMap) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("failed to prepare request: %w", err)) + } + + client := httpclient.New() + startTime := time.Now() + + resp, err := client.Do(httpReq.WithContext(ctx)) + if err != nil { + return nil, connect.NewError(connect.CodeUnavailable, fmt.Errorf("request 
failed: %w", err)) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to read response: %w", err)) + } + + duration := time.Since(startTime).Milliseconds() + + // Store response + responseID := idwrap.NewNow() + nowUnix := time.Now().Unix() + + gqlResponse := mgraphql.GraphQLResponse{ + ID: responseID, + GraphQLID: gqlID, + Status: int32(resp.StatusCode), //nolint:gosec + Body: body, + Time: startTime.Unix(), + Duration: int32(duration), //nolint:gosec + Size: int32(len(body)), //nolint:gosec + CreatedAt: nowUnix, + } + + mut := mutation.New(s.DB, mutation.WithPublisher(s.mutationPublisher())) + if err := mut.Begin(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to begin transaction: %w", err)) + } + defer mut.Rollback() + + tx := mut.TX() + txResponseService := s.responseService.TX(tx) + + if err := txResponseService.Create(ctx, gqlResponse); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Store response headers + var respHeaderEvents []GraphQLResponseHeaderEvent + responseHeaders := make(map[string]string) + for key, values := range resp.Header { + for _, val := range values { + headerID := idwrap.NewNow() + respHeader := mgraphql.GraphQLResponseHeader{ + ID: headerID, + ResponseID: responseID, + HeaderKey: key, + HeaderValue: val, + CreatedAt: nowUnix, + } + if err := txResponseService.CreateHeader(ctx, respHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + respHeaderEvents = append(respHeaderEvents, GraphQLResponseHeaderEvent{ + Type: eventTypeInsert, + GraphQLResponseHeader: ToAPIGraphQLResponseHeader(respHeader), + }) + // Store first value for each header key for assertion context + if _, exists := responseHeaders[key]; !exists { + responseHeaders[key] = val + } + } + } + + // Update last_run_at + now := time.Now().Unix() + 
gqlEntry.LastRunAt = &now + txGraphqlService := s.graphqlService.TX(tx) + if err := txGraphqlService.Update(ctx, gqlEntry); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Create version with snapshot + versionName := fmt.Sprintf("v%d", time.Now().UnixNano()) + versionDesc := "Auto-saved version (Run)" + txGraphqlWriter := sgraphql.NewWriterFromQueries(gen.New(tx)) + + version, err := txGraphqlWriter.CreateGraphQLVersion(ctx, gqlID, userID, versionName, versionDesc) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create version: %w", err)) + } + + // Create snapshot GraphQL entry (using version ID as GraphQL ID) + snapshotGraphQL := &mgraphql.GraphQL{ + ID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + FolderID: gqlEntry.FolderID, + Name: gqlEntry.Name, + Url: gqlEntry.Url, + Query: gqlEntry.Query, + Variables: gqlEntry.Variables, + Description: gqlEntry.Description, + IsSnapshot: true, + IsDelta: false, + } + if err := txGraphqlWriter.Create(ctx, snapshotGraphQL); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot GraphQL: %w", err)) + } + + // Track snapshot GraphQL insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQL, + Op: mutation.OpInsert, + ID: version.ID, + ParentID: gqlEntry.WorkspaceID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotGraphQL, + }) + + // Clone headers into snapshot + txHeaderService := s.headerService.TX(tx) + for _, header := range headers { + snapshotHeader := &mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Key: header.Key, + Value: header.Value, + Enabled: header.Enabled, + Description: header.Description, + DisplayOrder: header.DisplayOrder, + } + if err := txHeaderService.Create(ctx, snapshotHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to clone header: %w", err)) + } + + // Track snapshot 
header insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLHeader, + Op: mutation.OpInsert, + ID: snapshotHeader.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotHeader, + }) + } + + // Clone request assertions into snapshot (matches HTTP pattern) + txAssertService := s.graphqlAssertService.TX(tx) + for _, assert := range asserts { + snapshotAssert := &mgraphql.GraphQLAssert{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: assert.DisplayOrder, + } + if err := txAssertService.Create(ctx, snapshotAssert); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to clone assertion: %w", err)) + } + + // Track snapshot assertion insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLAssert, + Op: mutation.OpInsert, + ID: snapshotAssert.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: *snapshotAssert, + }) + } + + // Clone response into snapshot + snapshotResponse := mgraphql.GraphQLResponse{ + ID: idwrap.NewNow(), + GraphQLID: version.ID, + Status: gqlResponse.Status, + Body: gqlResponse.Body, + Time: gqlResponse.Time, + Duration: gqlResponse.Duration, + Size: gqlResponse.Size, + CreatedAt: gqlResponse.CreatedAt, + } + txResponseSvc := s.responseService.TX(tx) + if err := txResponseSvc.Create(ctx, snapshotResponse); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot response: %w", err)) + } + + // Track snapshot response insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponse, + Op: mutation.OpInsert, + ID: snapshotResponse.ID, + ParentID: version.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotResponse, + }) + + // Clone response headers into snapshot + for key, values := range resp.Header { + for _, val := range values { + snapshotRespHeader 
:= mgraphql.GraphQLResponseHeader{ + ID: idwrap.NewNow(), + ResponseID: snapshotResponse.ID, + HeaderKey: key, + HeaderValue: val, + CreatedAt: nowUnix, + } + if err := txResponseSvc.CreateHeader(ctx, snapshotRespHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to create snapshot response header: %w", err)) + } + + // Track snapshot response header insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponseHeader, + Op: mutation.OpInsert, + ID: snapshotRespHeader.ID, + ParentID: snapshotResponse.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotRespHeader, + }) + } + } + + // Evaluate assertions BEFORE commit (matches HTTP pattern) + // This ensures response assertions exist in DB before we clone them into snapshot + var responseAssertions []mgraphql.GraphQLResponseAssert + if len(asserts) > 0 { + // Prepare response data for assertion evaluation + respData := GraphQLResponseData{ + StatusCode: resp.StatusCode, + Body: body, + Headers: responseHeaders, + } + + // Evaluate and store assertions within the same transaction + responseAssertions, err = s.evaluateAndStoreAssertions(ctx, tx, gqlID, responseID, gqlEntry.WorkspaceID, respData, asserts) + if err != nil { + slog.WarnContext(ctx, "Failed to evaluate assertions", + "error", err, + "graphql_id", gqlID.String(), + "response_id", responseID.String()) + // Don't fail the request, assertions are supplementary + responseAssertions = []mgraphql.GraphQLResponseAssert{} + } + } + + // Clone response assertions into snapshot (matches HTTP pattern) + for _, responseAssert := range responseAssertions { + snapshotResponseAssert := mgraphql.GraphQLResponseAssert{ + ID: idwrap.NewNow(), + ResponseID: snapshotResponse.ID, + Value: responseAssert.Value, + Success: responseAssert.Success, + CreatedAt: nowUnix, + } + if err := txResponseSvc.CreateAssert(ctx, snapshotResponseAssert); err != nil { + return nil, connect.NewError(connect.CodeInternal, 
fmt.Errorf("failed to clone response assertion: %w", err)) + } + + // Track snapshot response assertion insertion event + mut.Track(mutation.Event{ + Entity: mutation.EntityGraphQLResponseAssert, + Op: mutation.OpInsert, + ID: snapshotResponseAssert.ID, + ParentID: snapshotResponse.ID, + WorkspaceID: gqlEntry.WorkspaceID, + Payload: snapshotResponseAssert, + }) + } + + // Collect events before commit for manual publishing of snapshot entities + snapshotEvents := mut.Events() + + if err := mut.Commit(ctx); err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to commit transaction: %w", err)) + } + + // Publish events + if s.streamers.GraphQLResponse != nil { + s.streamers.GraphQLResponse.Publish(GraphQLResponseTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLResponseEvent{ + Type: eventTypeInsert, + GraphQLResponse: ToAPIGraphQLResponse(gqlResponse), + }) + } + if s.streamers.GraphQLResponseHeader != nil { + topic := GraphQLResponseHeaderTopic{WorkspaceID: gqlEntry.WorkspaceID} + for _, evt := range respHeaderEvents { + s.streamers.GraphQLResponseHeader.Publish(topic, evt) + } + } + if s.streamers.GraphQL != nil { + s.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLEvent{ + Type: eventTypeUpdate, + GraphQL: ToAPIGraphQL(*gqlEntry), + }) + } + + // Publish version insert event + if s.streamers.GraphQLVersion != nil { + s.streamers.GraphQLVersion.Publish(GraphQLVersionTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLVersionEvent{ + Type: eventTypeInsert, + GraphQLVersion: ToAPIGraphQLVersion(*version), + }) + } + + // Publish response assertion events (now that they're committed) + if len(responseAssertions) > 0 && s.streamers.GraphQLResponseAssert != nil { + topic := GraphQLResponseAssertTopic{WorkspaceID: gqlEntry.WorkspaceID} + for _, assert := range responseAssertions { + s.streamers.GraphQLResponseAssert.Publish(topic, GraphQLResponseAssertEvent{ + Type: eventTypeInsert, + GraphQLResponseAssert: 
ToAPIGraphQLResponseAssert(assert), + }) + } + } + + // Publish snapshot sync events for snapshot response/headers/assertions + // so the frontend receives real-time updates for the newly created snapshot data + s.publishSnapshotSyncEvents(snapshotEvents, gqlEntry.WorkspaceID) + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLDuplicate(ctx context.Context, req *connect.Request[graphqlv1.GraphQLDuplicateRequest]) (*connect.Response[emptypb.Empty], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceWriteAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + // Read headers outside TX + headers, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + headers = []mgraphql.GraphQLHeader{} + } + + newGQLID := idwrap.NewNow() + + tx, err := s.DB.BeginTx(ctx, nil) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + defer devtoolsdb.TxnRollback(tx) + + txGraphqlService := s.graphqlService.TX(tx) + txHeaderService := s.headerService.TX(tx) + + newEntry := &mgraphql.GraphQL{ + ID: newGQLID, + WorkspaceID: gqlEntry.WorkspaceID, + FolderID: gqlEntry.FolderID, + Name: fmt.Sprintf("Copy of %s", gqlEntry.Name), + Url: gqlEntry.Url, + Query: gqlEntry.Query, + Variables: gqlEntry.Variables, + Description: gqlEntry.Description, + } + + if err := txGraphqlService.Create(ctx, newEntry); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + for _, h 
:= range headers { + newHeader := &mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: newGQLID, + Key: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + DisplayOrder: h.DisplayOrder, + } + if err := txHeaderService.Create(ctx, newHeader); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + } + + if err := tx.Commit(); err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + // Publish GraphQL insert event + if s.streamers.GraphQL != nil { + s.streamers.GraphQL.Publish(GraphQLTopic{WorkspaceID: gqlEntry.WorkspaceID}, GraphQLEvent{ + Type: eventTypeInsert, + GraphQL: ToAPIGraphQL(*newEntry), + }) + } + + return connect.NewResponse(&emptypb.Empty{}), nil +} + +func (s *GraphQLServiceRPC) GraphQLIntrospect(ctx context.Context, req *connect.Request[graphqlv1.GraphQLIntrospectRequest]) (*connect.Response[graphqlv1.GraphQLIntrospectResponse], error) { + if len(req.Msg.GraphqlId) == 0 { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("graphql_id is required")) + } + + gqlID, err := idwrap.NewFromBytes(req.Msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + gqlEntry, err := s.graphqlService.Get(ctx, gqlID) + if err != nil { + if errors.Is(err, sgraphql.ErrNoGraphQLFound) { + return nil, connect.NewError(connect.CodeNotFound, err) + } + return nil, connect.NewError(connect.CodeInternal, err) + } + + if err := s.checkWorkspaceReadAccess(ctx, gqlEntry.WorkspaceID); err != nil { + return nil, err + } + + varMap, err := s.buildWorkspaceVarMap(ctx, gqlEntry.WorkspaceID) + if err != nil { + varMap = make(map[string]any) + } + + headers, err := s.headerService.GetByGraphQLID(ctx, gqlID) + if err != nil { + headers = []mgraphql.GraphQLHeader{} + } + + // Build introspection request + body, _ := json.Marshal(map[string]any{ + "query": introspectionQuery, + }) + + url := interpolateString(gqlEntry.Url, varMap) + httpReq, err 
:= http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body)) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("failed to create request: %w", err)) + } + httpReq.Header.Set("Content-Type", "application/json") + + for _, h := range headers { + if h.Enabled && h.Key != "" { + httpReq.Header.Set(interpolateString(h.Key, varMap), interpolateString(h.Value, varMap)) + } + } + + client := httpclient.New() + resp, err := client.Do(httpReq) + if err != nil { + return nil, connect.NewError(connect.CodeUnavailable, fmt.Errorf("introspection request failed: %w", err)) + } + defer func() { _ = resp.Body.Close() }() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("failed to read response: %w", err)) + } + + return connect.NewResponse(&graphqlv1.GraphQLIntrospectResponse{ + IntrospectionJson: string(respBody), + Sdl: "", // SDL conversion would need a graphql library - return empty for now + }), nil +} + +// Helper functions + +func (s *GraphQLServiceRPC) buildWorkspaceVarMap(ctx context.Context, workspaceID idwrap.IDWrap) (map[string]any, error) { + workspace, err := s.ws.Get(ctx, workspaceID) + if err != nil { + return nil, fmt.Errorf("failed to get workspace: %w", err) + } + + var globalVars []menv.Variable + if workspace.GlobalEnv != (idwrap.IDWrap{}) { + globalVars, err = s.vs.GetVariableByEnvID(ctx, workspace.GlobalEnv) + if err != nil && !errors.Is(err, senv.ErrNoVarFound) { + return nil, fmt.Errorf("failed to get global environment variables: %w", err) + } + } + + varMap := make(map[string]any) + for _, envVar := range globalVars { + if envVar.IsEnabled() { + varMap[envVar.VarKey] = envVar.Value + } + } + + return varMap, nil +} + +func prepareGraphQLRequest(gql *mgraphql.GraphQL, headers []mgraphql.GraphQLHeader, varMap map[string]any) (*http.Request, error) { + url := interpolateString(gql.Url, varMap) + query := 
interpolateString(gql.Query, varMap) + variables := interpolateString(gql.Variables, varMap) + + var varsMap map[string]any + if variables != "" { + if err := json.Unmarshal([]byte(variables), &varsMap); err != nil { + varsMap = nil + } + } + + bodyMap := map[string]any{"query": query} + if varsMap != nil { + bodyMap["variables"] = varsMap + } + + bodyBytes, err := json.Marshal(bodyMap) + if err != nil { + return nil, fmt.Errorf("failed to marshal body: %w", err) + } + + req, err := http.NewRequestWithContext(context.Background(), http.MethodPost, url, bytes.NewReader(bodyBytes)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + for _, h := range headers { + if h.Enabled && h.Key != "" { + req.Header.Set(interpolateString(h.Key, varMap), interpolateString(h.Value, varMap)) + } + } + + return req, nil +} + +func interpolateString(s string, varMap map[string]any) string { + result := s + for key, val := range varMap { + placeholder := "{{" + key + "}}" + valStr := fmt.Sprintf("%v", val) + result = strings.ReplaceAll(result, placeholder, valStr) + // Also support {{ key }} (with spaces) + placeholder = "{{ " + key + " }}" + result = strings.ReplaceAll(result, placeholder, valStr) + } + return result +} + +// publishSnapshotSyncEvents publishes sync events for snapshot entities +// so the frontend receives real-time updates for the newly created snapshot data. +// This function follows the same pattern as HTTP's publishSnapshotSyncEvents. 
+func (s *GraphQLServiceRPC) publishSnapshotSyncEvents(events []mutation.Event, workspaceID idwrap.IDWrap) { + for _, evt := range events { + //nolint:exhaustive + switch evt.Entity { + case mutation.EntityGraphQLResponse: + if s.streamers.GraphQLResponse != nil { + if resp, ok := evt.Payload.(mgraphql.GraphQLResponse); ok { + s.streamers.GraphQLResponse.Publish( + GraphQLResponseTopic{WorkspaceID: workspaceID}, + GraphQLResponseEvent{ + Type: eventTypeInsert, + GraphQLResponse: ToAPIGraphQLResponse(resp), + }, + ) + } + } + case mutation.EntityGraphQLResponseHeader: + if s.streamers.GraphQLResponseHeader != nil { + if rh, ok := evt.Payload.(mgraphql.GraphQLResponseHeader); ok { + s.streamers.GraphQLResponseHeader.Publish( + GraphQLResponseHeaderTopic{WorkspaceID: workspaceID}, + GraphQLResponseHeaderEvent{ + Type: eventTypeInsert, + GraphQLResponseHeader: ToAPIGraphQLResponseHeader(rh), + }, + ) + } + } + case mutation.EntityGraphQLResponseAssert: + if s.streamers.GraphQLResponseAssert != nil { + if ra, ok := evt.Payload.(mgraphql.GraphQLResponseAssert); ok { + s.streamers.GraphQLResponseAssert.Publish( + GraphQLResponseAssertTopic{WorkspaceID: workspaceID}, + GraphQLResponseAssertEvent{ + Type: eventTypeInsert, + GraphQLResponseAssert: ToAPIGraphQLResponseAssert(ra), + }, + ) + } + } + } + } +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go b/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go new file mode 100644 index 000000000..11d67f3aa --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec_assert.go @@ -0,0 +1,419 @@ +//nolint:revive // exported +package rgraphql + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "log/slog" + "strings" + "sync" + "time" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +// AssertionResult 
represents the result of evaluating a single assertion +type AssertionResult struct { + AssertionID idwrap.IDWrap + Expression string + Success bool + Error error + EvaluatedAt time.Time +} + +// GraphQLResponseData wraps the response for assertion evaluation +type GraphQLResponseData struct { + StatusCode int + Body []byte + Headers map[string]string +} + +// evaluateAndStoreAssertions evaluates assertions and stores them within a transaction, returning the created assertions +// This is used by GraphQLRun to evaluate assertions before commit so they can be cloned into snapshots +func (s *GraphQLServiceRPC) evaluateAndStoreAssertions(ctx context.Context, tx *sql.Tx, graphqlID idwrap.IDWrap, responseID idwrap.IDWrap, workspaceID idwrap.IDWrap, resp GraphQLResponseData, asserts []mgraphql.GraphQLAssert) ([]mgraphql.GraphQLResponseAssert, error) { + if len(asserts) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + enabledAsserts := make([]mgraphql.GraphQLAssert, 0, len(asserts)) + for _, assert := range asserts { + if assert.IsEnabled() { + enabledAsserts = append(enabledAsserts, assert) + } + } + + if len(enabledAsserts) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + evalContext := s.createAssertionEvalContext(resp) + results := s.evaluateAssertionsParallel(ctx, enabledAsserts, evalContext) + + // Store results within the provided transaction + responseAsserts, err := s.storeAssertionResultsInTx(ctx, tx, responseID, results) + if err != nil { + return nil, fmt.Errorf("failed to store assertion results for GraphQL %s: %w", graphqlID.String(), err) + } + + return responseAsserts, nil +} + +// evaluateAssertionsParallel evaluates multiple assertions in parallel with timeout and error handling +func (s *GraphQLServiceRPC) evaluateAssertionsParallel(ctx context.Context, asserts []mgraphql.GraphQLAssert, evalContext map[string]any) []AssertionResult { + results := make([]AssertionResult, len(asserts)) + resultChan := make(chan 
AssertionResult, len(asserts)) + + var wg sync.WaitGroup + + // Create a context with timeout for assertion evaluation (30 seconds per assertion batch) + evalCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + + // Evaluate each assertion in a separate goroutine + for i, assert := range asserts { + wg.Add(1) + go func(idx int, assertion mgraphql.GraphQLAssert) { + defer wg.Done() + startTime := time.Now() + result := AssertionResult{ + AssertionID: assertion.ID, + EvaluatedAt: startTime, + } + + // Recover from panics in assertion evaluation + defer func() { + if r := recover(); r != nil { + result.Error = fmt.Errorf("panic during assertion evaluation: %v", r) + result.Success = false + resultChan <- result + } + }() + + // Use the assertion value directly as the expression + expression := assertion.Value + result.Expression = expression + + // Evaluate the assertion expression with context + success, err := s.evaluateAssertion(evalCtx, expression, evalContext) + if err != nil { + // Check for context timeout + if evalCtx.Err() == context.DeadlineExceeded { + result.Error = fmt.Errorf("assertion evaluation timed out: %w", err) + } else { + result.Error = fmt.Errorf("evaluation failed: %w", err) + } + result.Success = false + } else { + result.Success = success + } + + // Add evaluation duration for monitoring + duration := time.Since(startTime) + if duration > 5*time.Second { + slog.WarnContext(ctx, "Slow assertion evaluation", + "assertion_id", assertion.ID.String(), + "duration", duration) + } + + resultChan <- result + }(i, assert) + } + + // Close the result channel when all goroutines complete + go func() { + wg.Wait() + close(resultChan) + }() + + // Collect results preserving order with timeout + collectCtx, collectCancel := context.WithTimeout(ctx, 35*time.Second) + defer collectCancel() + + collectedCount := 0 + for { + select { + case result, ok := <-resultChan: + if !ok { + // Channel closed, all results collected + goto done + } + 
// Find the original index for this result + for j, assert := range asserts { + if assert.ID == result.AssertionID { + results[j] = result + collectedCount++ + break + } + } + + case <-collectCtx.Done(): + // Collection timeout - fill missing results with timeout error + slog.WarnContext(ctx, "Assertion result collection timed out after 35 seconds") + for j, assert := range asserts { + if results[j].AssertionID.String() == "" { + results[j] = AssertionResult{ + AssertionID: assert.ID, + Expression: assert.Value, + Success: false, + Error: fmt.Errorf("collection timeout"), + EvaluatedAt: time.Now(), + } + } + } + goto done + + case <-evalCtx.Done(): + // Evaluation context cancelled + slog.WarnContext(ctx, "Assertion evaluation context cancelled", "error", evalCtx.Err()) + for j, assert := range asserts { + if results[j].AssertionID.String() == "" { + results[j] = AssertionResult{ + AssertionID: assert.ID, + Expression: assert.Value, + Success: false, + Error: fmt.Errorf("evaluation cancelled: %w", evalCtx.Err()), + EvaluatedAt: time.Now(), + } + } + } + goto done + } + } + +done: + if collectedCount != len(asserts) { + slog.WarnContext(ctx, "Incomplete assertion result collection", + "collected", collectedCount, + "total", len(asserts)) + } + + return results +} + +// storeAssertionResultsInTx stores assertion results within an existing transaction and returns the created assertions +func (s *GraphQLServiceRPC) storeAssertionResultsInTx(ctx context.Context, tx *sql.Tx, responseID idwrap.IDWrap, results []AssertionResult) ([]mgraphql.GraphQLResponseAssert, error) { + if len(results) == 0 { + return []mgraphql.GraphQLResponseAssert{}, nil + } + + txResponseService := s.responseService.TX(tx) + now := time.Now().Unix() + responseAsserts := make([]mgraphql.GraphQLResponseAssert, 0, len(results)) + + for _, result := range results { + var value string + var success bool + + if result.Error != nil { + // Store error information in the value field + value = 
fmt.Sprintf("ERROR: %s", result.Error.Error()) + success = false + } else { + // Store successful assertion result + value = result.Expression + success = result.Success + } + + assertID := idwrap.NewNow() + assert := mgraphql.GraphQLResponseAssert{ + ID: assertID, + ResponseID: responseID, + Value: value, + Success: success, + CreatedAt: now, + } + + if err := txResponseService.CreateAssert(ctx, assert); err != nil { + return nil, fmt.Errorf("failed to insert assertion result for %s: %w", result.AssertionID.String(), err) + } + + responseAsserts = append(responseAsserts, assert) + } + + slog.InfoContext(ctx, "Stored assertion results in transaction", + "count", len(results), + "response_id", responseID.String()) + + return responseAsserts, nil +} + +// createAssertionEvalContext creates the evaluation context with response data +func (s *GraphQLServiceRPC) createAssertionEvalContext(resp GraphQLResponseData) map[string]any { + // Parse response body as JSON if possible + var body any + var bodyMap map[string]any + bodyString := string(resp.Body) + + if err := json.Unmarshal(resp.Body, &body); err != nil { + // If JSON parsing fails, use as string + body = bodyString + } else { + // Also try to parse as map for easier access + if mapBody, ok := body.(map[string]any); ok { + bodyMap = mapBody + } + } + + // Convert headers to map + headers := make(map[string]string) + headersLower := make(map[string]string) + contentType := "" + + for key, value := range resp.Headers { + lowerKey := strings.ToLower(key) + headers[key] = value + headersLower[lowerKey] = value + + if lowerKey == "content-type" { + contentType = value + } + } + + // Extract GraphQL-specific fields from response + var data any + var errors any + if bodyMap != nil { + if d, ok := bodyMap["data"]; ok { + data = d + } + if e, ok := bodyMap["errors"]; ok { + errors = e + } + } + + // Extract JSON path helpers (for full body navigation) + jsonPathHelpers := s.createJSONPathHelpers(bodyMap) + + // Extract 
JSON path helpers for data field specifically + var dataMap map[string]any + if data != nil { + if dm, ok := data.(map[string]any); ok { + dataMap = dm + } + } + dataPathHelpers := s.createJSONPathHelpers(dataMap) + + // Create comprehensive evaluation context + context := map[string]any{ + // Main response object + "response": map[string]any{ + "status": resp.StatusCode, + "body": body, + "headers": headers, + "data": data, + "errors": errors, + }, + + // Direct access to commonly used fields + "status": resp.StatusCode, + "body": body, + "body_string": bodyString, + "headers": headers, + "content_type": contentType, + + // GraphQL-specific fields (top-level for convenience) + "data": data, + "errors": errors, + + // Convenience variables + "success": resp.StatusCode >= 200 && resp.StatusCode < 300, + "client_error": resp.StatusCode >= 400 && resp.StatusCode < 500, + "server_error": resp.StatusCode >= 500 && resp.StatusCode < 600, + "is_json": strings.HasPrefix(contentType, "application/json"), + "has_body": len(resp.Body) > 0, + "has_data": data != nil, + "has_errors": errors != nil, + + // JSON path helpers (for full body) + "json": jsonPathHelpers, + // JSON path helpers specifically for data field + "dataJson": dataPathHelpers, + } + + return context +} + +// createJSONPathHelpers creates helper functions for JSON path navigation +func (s *GraphQLServiceRPC) createJSONPathHelpers(bodyMap map[string]any) map[string]any { + helpers := make(map[string]any) + + if bodyMap == nil { + return helpers + } + + // Helper function to get nested value by path + getPath := func(path string) any { + parts := strings.Split(path, ".") + current := bodyMap + + for _, part := range parts { + if next, ok := current[part]; ok { + if nextMap, ok := next.(map[string]any); ok { + current = nextMap + } else { + return next + } + } else { + return nil + } + } + return current + } + + // Helper to check if path exists + hasPath := func(path string) bool { + return getPath(path) != nil 
+ } + + // Helper to get string value + getString := func(path string) string { + val := getPath(path) + if val == nil { + return "" + } + if str, ok := val.(string); ok { + return str + } + return fmt.Sprintf("%v", val) + } + + // Helper to get numeric value + getNumber := func(path string) float64 { + val := getPath(path) + if val == nil { + return 0 + } + switch num := val.(type) { + case float64: + return num + case int: + return float64(num) + case int64: + return float64(num) + default: + if str, ok := val.(string); ok { + var f float64 + _, _ = fmt.Sscanf(str, "%f", &f) + return f + } + } + return 0 + } + + helpers["path"] = getPath + helpers["has"] = hasPath + helpers["string"] = getString + helpers["number"] = getNumber + + return helpers +} + +// evaluateAssertion evaluates an assertion expression against the provided context +func (s *GraphQLServiceRPC) evaluateAssertion(ctx context.Context, expressionStr string, context map[string]any) (bool, error) { + env := expression.NewEnv(context) + return expression.ExpressionEvaluteAsBool(ctx, env, expressionStr) +} diff --git a/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go b/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go new file mode 100644 index 000000000..a9567c1f1 --- /dev/null +++ b/packages/server/internal/api/rgraphql/rgraphql_exec_assert_test.go @@ -0,0 +1,442 @@ +//nolint:revive // test file +package rgraphql + +import ( + "context" + "testing" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +func TestCreateAssertionEvalContext(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + + tests := []struct { + name string + response GraphQLResponseData + validate func(t *testing.T, ctx map[string]any) + }{ + { + name: "basic JSON response", + response: GraphQLResponseData{ + StatusCode: 200, + Body: []byte(`{"data": {"user": {"name": "Alice"}}}`), + Headers: 
map[string]string{ + "Content-Type": "application/json", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + // Check status code + if status, ok := ctx["status"].(int); !ok || status != 200 { + t.Errorf("expected status 200, got %v", ctx["status"]) + } + + // Check success flag + if success, ok := ctx["success"].(bool); !ok || !success { + t.Errorf("expected success=true for 2xx status, got %v", ctx["success"]) + } + + // Check is_json flag + if isJSON, ok := ctx["is_json"].(bool); !ok || !isJSON { + t.Errorf("expected is_json=true for JSON content-type, got %v", ctx["is_json"]) + } + + // Check body parsing + if body, ok := ctx["body"].(map[string]any); !ok { + t.Errorf("expected body to be parsed as map, got %T", ctx["body"]) + } else { + if data, ok := body["data"].(map[string]any); !ok { + t.Errorf("expected body.data to exist") + } else { + if user, ok := data["user"].(map[string]any); !ok { + t.Errorf("expected body.data.user to exist") + } else { + if name, ok := user["name"].(string); !ok || name != "Alice" { + t.Errorf("expected body.data.user.name='Alice', got %v", name) + } + } + } + } + + // Check JSON path helpers + if jsonHelpers, ok := ctx["json"].(map[string]any); !ok { + t.Errorf("expected json helpers to exist") + } else { + // Test path helper + if pathFn, ok := jsonHelpers["path"].(func(string) any); ok { + result := pathFn("data.user.name") + if name, ok := result.(string); !ok || name != "Alice" { + t.Errorf("json.path('data.user.name') expected 'Alice', got %v", result) + } + } else { + t.Errorf("expected json.path function to exist") + } + + // Test has helper + if hasFn, ok := jsonHelpers["has"].(func(string) bool); ok { + if !hasFn("data.user.name") { + t.Errorf("json.has('data.user.name') should return true") + } + if hasFn("data.missing") { + t.Errorf("json.has('data.missing') should return false") + } + } else { + t.Errorf("expected json.has function to exist") + } + } + }, + }, + { + name: "client error response", + 
response: GraphQLResponseData{ + StatusCode: 404, + Body: []byte(`{"error": "Not found"}`), + Headers: map[string]string{ + "Content-Type": "application/json", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + if status, ok := ctx["status"].(int); !ok || status != 404 { + t.Errorf("expected status 404, got %v", ctx["status"]) + } + + if success, ok := ctx["success"].(bool); !ok || success { + t.Errorf("expected success=false for 4xx status, got %v", ctx["success"]) + } + + if clientError, ok := ctx["client_error"].(bool); !ok || !clientError { + t.Errorf("expected client_error=true for 4xx status, got %v", ctx["client_error"]) + } + + if serverError, ok := ctx["server_error"].(bool); !ok || serverError { + t.Errorf("expected server_error=false for 4xx status, got %v", ctx["server_error"]) + } + }, + }, + { + name: "server error response", + response: GraphQLResponseData{ + StatusCode: 500, + Body: []byte(`Internal Server Error`), + Headers: map[string]string{ + "Content-Type": "text/plain", + }, + }, + validate: func(t *testing.T, ctx map[string]any) { + if status, ok := ctx["status"].(int); !ok || status != 500 { + t.Errorf("expected status 500, got %v", ctx["status"]) + } + + if success, ok := ctx["success"].(bool); !ok || success { + t.Errorf("expected success=false for 5xx status, got %v", ctx["success"]) + } + + if serverError, ok := ctx["server_error"].(bool); !ok || !serverError { + t.Errorf("expected server_error=true for 5xx status, got %v", ctx["server_error"]) + } + + if isJSON, ok := ctx["is_json"].(bool); !ok || isJSON { + t.Errorf("expected is_json=false for text/plain, got %v", ctx["is_json"]) + } + + // Body should be string since JSON parsing fails + if bodyStr, ok := ctx["body_string"].(string); !ok || bodyStr != "Internal Server Error" { + t.Errorf("expected body_string='Internal Server Error', got %v", ctx["body_string"]) + } + }, + }, + { + name: "empty response", + response: GraphQLResponseData{ + StatusCode: 204, + Body: 
[]byte{}, + Headers: map[string]string{}, + }, + validate: func(t *testing.T, ctx map[string]any) { + if hasBody, ok := ctx["has_body"].(bool); !ok || hasBody { + t.Errorf("expected has_body=false for empty body, got %v", ctx["has_body"]) + } + + if success, ok := ctx["success"].(bool); !ok || !success { + t.Errorf("expected success=true for 204 status, got %v", ctx["success"]) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := srv.createAssertionEvalContext(tt.response) + tt.validate(t, ctx) + }) + } +} + +func TestEvaluateAssertionsParallel(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + ctx := context.Background() + + tests := []struct { + name string + asserts []mgraphql.GraphQLAssert + evalContext map[string]any + validateCount int + checkResults func(t *testing.T, results []AssertionResult) + }{ + { + name: "empty assertions list", + asserts: []mgraphql.GraphQLAssert{}, + evalContext: map[string]any{}, + validateCount: 0, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 0 { + t.Errorf("expected 0 results for empty assertions, got %d", len(results)) + } + }, + }, + { + name: "single successful assertion", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 200", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Error != nil { + t.Errorf("expected no error, got %v", results[0].Error) + } + if !results[0].Success { + t.Errorf("expected success=true for status == 200") + } + }, + }, + { + name: "single failing assertion", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 404", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + 
checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + if results[0].Error != nil { + t.Errorf("expected no error, got %v", results[0].Error) + } + if results[0].Success { + t.Errorf("expected success=false for status == 404 when status is 200") + } + }, + }, + { + name: "multiple assertions", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "status == 200", + Enabled: true, + }, + { + ID: idwrap.NewNow(), + Value: "success == true", + Enabled: true, + }, + { + ID: idwrap.NewNow(), + Value: "is_json == true", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + "success": true, + "is_json": true, + }, + validateCount: 3, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 3 { + t.Fatalf("expected 3 results, got %d", len(results)) + } + for i, result := range results { + if result.Error != nil { + t.Errorf("result[%d]: expected no error, got %v", i, result.Error) + } + if !result.Success { + t.Errorf("result[%d]: expected success=true, expression=%s", i, result.Expression) + } + } + }, + }, + { + name: "invalid expression", + asserts: []mgraphql.GraphQLAssert{ + { + ID: idwrap.NewNow(), + Value: "invalid syntax %%%", + Enabled: true, + }, + }, + evalContext: map[string]any{ + "status": 200, + }, + validateCount: 1, + checkResults: func(t *testing.T, results []AssertionResult) { + if len(results) != 1 { + t.Fatalf("expected 1 result, got %d", len(results)) + } + // Should have an error for invalid syntax + if results[0].Error == nil { + t.Errorf("expected error for invalid expression syntax") + } + if results[0].Success { + t.Errorf("expected success=false for invalid expression") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + results := srv.evaluateAssertionsParallel(ctx, tt.asserts, tt.evalContext) + + if len(results) != tt.validateCount { 
+ t.Fatalf("expected %d results, got %d", tt.validateCount, len(results)) + } + + tt.checkResults(t, results) + + // Verify all results have timestamps + for i, result := range results { + if result.EvaluatedAt.IsZero() { + t.Errorf("result[%d]: expected non-zero EvaluatedAt timestamp", i) + } + } + }) + } +} + +func TestCreateJSONPathHelpers(t *testing.T) { + t.Parallel() + + srv := &GraphQLServiceRPC{} + + tests := []struct { + name string + bodyMap map[string]any + checks func(t *testing.T, helpers map[string]any) + }{ + { + name: "nil body map", + bodyMap: nil, + checks: func(t *testing.T, helpers map[string]any) { + if helpers == nil { + t.Errorf("expected non-nil helpers map") + } + if len(helpers) != 0 { + t.Errorf("expected empty helpers for nil body, got %d", len(helpers)) + } + }, + }, + { + name: "simple nested object", + bodyMap: map[string]any{ + "data": map[string]any{ + "user": map[string]any{ + "name": "Bob", + "age": 30, + }, + }, + }, + checks: func(t *testing.T, helpers map[string]any) { + // Test path function + if pathFn, ok := helpers["path"].(func(string) any); ok { + // Test valid path + if result := pathFn("data.user.name"); result != "Bob" { + t.Errorf("path('data.user.name') expected 'Bob', got %v", result) + } + + // Test nested path + if result := pathFn("data.user.age"); result != 30 { + t.Errorf("path('data.user.age') expected 30, got %v", result) + } + + // Test invalid path + if result := pathFn("data.missing"); result != nil { + t.Errorf("path('data.missing') expected nil, got %v", result) + } + } else { + t.Errorf("expected path function to exist") + } + + // Test has function + if hasFn, ok := helpers["has"].(func(string) bool); ok { + if !hasFn("data.user.name") { + t.Errorf("has('data.user.name') should return true") + } + if hasFn("data.missing") { + t.Errorf("has('data.missing') should return false") + } + } else { + t.Errorf("expected has function to exist") + } + + // Test string function + if strFn, ok := 
helpers["string"].(func(string) string); ok { + if result := strFn("data.user.name"); result != "Bob" { + t.Errorf("string('data.user.name') expected 'Bob', got %v", result) + } + // Non-string value should be converted + if result := strFn("data.user.age"); result != "30" { + t.Errorf("string('data.user.age') expected '30', got %v", result) + } + } else { + t.Errorf("expected string function to exist") + } + + // Test number function + if numFn, ok := helpers["number"].(func(string) float64); ok { + if result := numFn("data.user.age"); result != 30.0 { + t.Errorf("number('data.user.age') expected 30.0, got %v", result) + } + // Missing path should return 0 + if result := numFn("data.missing"); result != 0 { + t.Errorf("number('data.missing') expected 0, got %v", result) + } + } else { + t.Errorf("expected number function to exist") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + helpers := srv.createJSONPathHelpers(tt.bodyMap) + tt.checks(t, helpers) + }) + } +} diff --git a/packages/server/internal/api/rimportv2/integrity.go b/packages/server/internal/api/rimportv2/integrity.go index c61ccfbd8..c8eb60224 100644 --- a/packages/server/internal/api/rimportv2/integrity.go +++ b/packages/server/internal/api/rimportv2/integrity.go @@ -105,6 +105,9 @@ func ValidateImportIntegrity( report.AddError("file", file.ID, "ContentID", *file.ContentID, fmt.Sprintf("file references non-existent HTTP (type=%s)", file.ContentType)) } + case mfile.ContentTypeGraphQL: + // GraphQL files reference GraphQL IDs - validation not yet implemented + continue case mfile.ContentTypeFlow, mfile.ContentTypeFolder, mfile.ContentTypeCredential: // Flow files reference flow IDs - we could validate these too // Folders don't have ContentID @@ -172,6 +175,9 @@ func ValidateTranslationResult(result *TranslationResult) *IntegrityReport { report.AddError("file", file.ID, "ContentID", *file.ContentID, fmt.Sprintf("file references HTTP not in 
translation result (type=%s)", file.ContentType)) } + case mfile.ContentTypeGraphQL: + // GraphQL files reference GraphQL IDs - validation not yet implemented + continue case mfile.ContentTypeFlow, mfile.ContentTypeFolder, mfile.ContentTypeCredential: continue } diff --git a/packages/server/internal/api/rimportv2/rimportv2_event.go b/packages/server/internal/api/rimportv2/rimportv2_event.go index 91e4f53d7..ced5dbc86 100644 --- a/packages/server/internal/api/rimportv2/rimportv2_event.go +++ b/packages/server/internal/api/rimportv2/rimportv2_event.go @@ -74,7 +74,7 @@ func (h *ImportV2RPC) publishEvents(ctx context.Context, results *ImportResults) kind = eventsync.KindFlowFile case mfile.ContentTypeFolder: kind = eventsync.KindFolder - case mfile.ContentTypeHTTP, mfile.ContentTypeHTTPDelta, mfile.ContentTypeCredential: + case mfile.ContentTypeHTTP, mfile.ContentTypeHTTPDelta, mfile.ContentTypeCredential, mfile.ContentTypeGraphQL: // Keep default KindHTTPFile } diff --git a/packages/server/internal/api/rreference/rreference.go b/packages/server/internal/api/rreference/rreference.go index fb77f3f41..4f0ad9fe0 100644 --- a/packages/server/internal/api/rreference/rreference.go +++ b/packages/server/internal/api/rreference/rreference.go @@ -23,6 +23,7 @@ import ( referencev1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/reference/v1" "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/reference/v1/referencev1connect" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "connectrpc.com/connect" @@ -48,6 +49,9 @@ type ReferenceServiceRPC struct { // http httpResponseReader *shttp.HttpResponseReader + + // graphql + graphqlResponseReader *sgraphql.GraphQLResponseService } type ReferenceServiceRPCReaders struct { @@ -60,8 +64,9 @@ type ReferenceServiceRPCReaders struct { NodeRequest *sflow.NodeRequestReader FlowVariable *sflow.FlowVariableReader 
FlowEdge *sflow.EdgeReader - NodeExecution *sflow.NodeExecutionReader - HttpResponse *shttp.HttpResponseReader + NodeExecution *sflow.NodeExecutionReader + HttpResponse *shttp.HttpResponseReader + GraphQLResponse *sgraphql.GraphQLResponseService } func (r *ReferenceServiceRPCReaders) Validate() error { @@ -98,6 +103,9 @@ func (r *ReferenceServiceRPCReaders) Validate() error { if r.HttpResponse == nil { return fmt.Errorf("http response reader is required") } + if r.GraphQLResponse == nil { + return fmt.Errorf("graphql response reader is required") + } return nil } @@ -137,6 +145,7 @@ func NewReferenceServiceRPC(deps ReferenceServiceRPCDeps) *ReferenceServiceRPC { flowEdgeReader: deps.Readers.FlowEdge, nodeExecutionReader: deps.Readers.NodeExecution, httpResponseReader: deps.Readers.HttpResponse, + graphqlResponseReader: deps.Readers.GraphQLResponse, } } @@ -233,6 +242,59 @@ func (c *ReferenceServiceRPC) getLatestResponse(ctx context.Context, httpID idwr }, nil } +func (c *ReferenceServiceRPC) getLatestGraphQLResponse(ctx context.Context, graphqlID idwrap.IDWrap) (map[string]interface{}, error) { + responses, err := c.graphqlResponseReader.GetByGraphQLID(ctx, graphqlID) + if err != nil { + return nil, err + } + if len(responses) == 0 { + return nil, nil + } + + // Find latest response + latest := responses[0] + for _, r := range responses { + if r.Time > latest.Time { + latest = r + } + } + + // Parse body + var body interface{} = string(latest.Body) + var bodyMap map[string]interface{} + if len(latest.Body) > 0 { + var jsonBody interface{} + if err := json.Unmarshal(latest.Body, &jsonBody); err == nil { + body = jsonBody + if m, ok := jsonBody.(map[string]interface{}); ok { + bodyMap = m + } + } + } + + // Extract GraphQL-specific fields (data and errors) + var data interface{} + var errors interface{} + if bodyMap != nil { + if d, ok := bodyMap["data"]; ok { + data = d + } + if e, ok := bodyMap["errors"]; ok { + errors = e + } + } + + return map[string]interface{}{ 
+ "status": latest.Status, + "body": body, + "data": data, + "errors": errors, + "headers": map[string]string{}, // Headers not currently linkable to specific response + "duration": latest.Duration, + "size": latest.Size, + }, nil +} + func (c *ReferenceServiceRPC) ReferenceTree(ctx context.Context, req *connect.Request[referencev1.ReferenceTreeRequest]) (*connect.Response[referencev1.ReferenceTreeResponse], error) { var Items []*referencev1.ReferenceTreeItem @@ -510,7 +572,7 @@ func (c *ReferenceServiceRPC) HandleNode(ctx context.Context, nodeID idwrap.IDWr // ReferenceCompletion calls reference.v1.ReferenceService.ReferenceCompletion. func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *connect.Request[referencev1.ReferenceCompletionRequest]) (*connect.Response[referencev1.ReferenceCompletionResponse], error) { - var workspaceID, httpID, flowNodeID *idwrap.IDWrap + var workspaceID, httpID, graphqlID, flowNodeID *idwrap.IDWrap msg := req.Msg if msg.WorkspaceId != nil { tempID, err := idwrap.NewFromBytes(msg.WorkspaceId) @@ -526,6 +588,13 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn } httpID = &tempID } + if msg.GraphqlId != nil { + tempID, err := idwrap.NewFromBytes(msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + graphqlID = &tempID + } if msg.FlowNodeId != nil { tempID, err := idwrap.NewFromBytes(msg.FlowNodeId) if err != nil { @@ -592,6 +661,51 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn }) } + if graphqlID != nil { + resp, err := c.getLatestGraphQLResponse(ctx, *graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if resp != nil { + // Add full response object + creator.AddWithKey("response", resp) + + // Add GraphQL-specific top-level fields for convenience + if data, ok := resp["data"]; ok && data != nil { + creator.AddWithKey("data", data) + } + if errors, ok := 
resp["errors"]; ok && errors != nil { + creator.AddWithKey("errors", errors) + } + + // Add convenience variables + status := int(0) + if s, ok := resp["status"].(int32); ok { + status = int(s) + } + creator.AddWithKey("status", status) + creator.AddWithKey("success", status >= 200 && status < 300) + creator.AddWithKey("has_data", resp["data"] != nil) + creator.AddWithKey("has_errors", resp["errors"] != nil) + } else { + // Fallback schema for GraphQL + creator.AddWithKey("response", map[string]interface{}{ + "status": 200, + "body": map[string]interface{}{}, + "data": map[string]interface{}{}, + "errors": nil, + "headers": map[string]string{}, + "duration": 0, + }) + creator.AddWithKey("data", map[string]interface{}{}) + creator.AddWithKey("status", 200) + creator.AddWithKey("success", true) + creator.AddWithKey("has_data", false) + creator.AddWithKey("has_errors", false) + } + } + if flowNodeID != nil { nodeID := *flowNodeID nodeInst, err := c.nodeReader.GetNode(ctx, nodeID) @@ -927,7 +1041,7 @@ func (c *ReferenceServiceRPC) ReferenceCompletion(ctx context.Context, req *conn // ReferenceValue calls reference.v1.ReferenceService.ReferenceValue. 
func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.Request[referencev1.ReferenceValueRequest]) (*connect.Response[referencev1.ReferenceValueResponse], error) { - var workspaceID, httpID, flowNodeID *idwrap.IDWrap + var workspaceID, httpID, graphqlID, flowNodeID *idwrap.IDWrap msg := req.Msg if msg.WorkspaceId != nil { tempID, err := idwrap.NewFromBytes(msg.WorkspaceId) @@ -943,6 +1057,13 @@ func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.R } httpID = &tempID } + if msg.GraphqlId != nil { + tempID, err := idwrap.NewFromBytes(msg.GraphqlId) + if err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + graphqlID = &tempID + } if msg.FlowNodeId != nil { tempID, err := idwrap.NewFromBytes(msg.FlowNodeId) if err != nil { @@ -1009,6 +1130,51 @@ func (c *ReferenceServiceRPC) ReferenceValue(ctx context.Context, req *connect.R }) } + if graphqlID != nil { + resp, err := c.getLatestGraphQLResponse(ctx, *graphqlID) + if err != nil { + return nil, connect.NewError(connect.CodeInternal, err) + } + + if resp != nil { + // Add full response object + lookup.AddWithKey("response", resp) + + // Add GraphQL-specific top-level fields for convenience + if data, ok := resp["data"]; ok && data != nil { + lookup.AddWithKey("data", data) + } + if errors, ok := resp["errors"]; ok && errors != nil { + lookup.AddWithKey("errors", errors) + } + + // Add convenience variables + status := int(0) + if s, ok := resp["status"].(int32); ok { + status = int(s) + } + lookup.AddWithKey("status", status) + lookup.AddWithKey("success", status >= 200 && status < 300) + lookup.AddWithKey("has_data", resp["data"] != nil) + lookup.AddWithKey("has_errors", resp["errors"] != nil) + } else { + // Fallback schema for GraphQL + lookup.AddWithKey("response", map[string]interface{}{ + "status": 200, + "body": map[string]interface{}{}, + "data": map[string]interface{}{}, + "errors": nil, + "headers": map[string]string{}, + 
"duration": 0, + }) + lookup.AddWithKey("data", map[string]interface{}{}) + lookup.AddWithKey("status", 200) + lookup.AddWithKey("success", true) + lookup.AddWithKey("has_data", false) + lookup.AddWithKey("has_errors", false) + } + } + if flowNodeID != nil { nodeID := *flowNodeID nodeInst, err := c.nodeReader.GetNode(ctx, nodeID) diff --git a/packages/server/internal/api/rreference/rreference_integration_test.go b/packages/server/internal/api/rreference/rreference_integration_test.go index 656ad499a..a03e298fb 100644 --- a/packages/server/internal/api/rreference/rreference_integration_test.go +++ b/packages/server/internal/api/rreference/rreference_integration_test.go @@ -13,6 +13,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/testutil" @@ -40,20 +41,24 @@ func TestReferenceCompletion_HttpId(t *testing.T) { httpService := services.HttpService httpResponseService := shttp.NewHttpResponseService(base.Queries) + // GraphQL services + graphqlResponseService := sgraphql.NewGraphQLResponseService(base.Queries) + svc := NewReferenceServiceRPC(ReferenceServiceRPCDeps{ DB: base.DB, Readers: ReferenceServiceRPCReaders{ - User: sworkspace.NewUserReader(base.DB), - Workspace: services.WorkspaceService.Reader(), - Env: envService.Reader(), - Variable: varService.Reader(), - Flow: flowService.Reader(), - Node: flowNodeService.Reader(), - NodeRequest: flowNodeRequestService.Reader(), - FlowVariable: flowVariableService.Reader(), - FlowEdge: edgeService.Reader(), - NodeExecution: nodeExecutionService.Reader(), - HttpResponse: 
httpResponseService.Reader(), + User: sworkspace.NewUserReader(base.DB), + Workspace: services.WorkspaceService.Reader(), + Env: envService.Reader(), + Variable: varService.Reader(), + Flow: flowService.Reader(), + Node: flowNodeService.Reader(), + NodeRequest: flowNodeRequestService.Reader(), + FlowVariable: flowVariableService.Reader(), + FlowEdge: edgeService.Reader(), + NodeExecution: nodeExecutionService.Reader(), + HttpResponse: httpResponseService.Reader(), + GraphQLResponse: &graphqlResponseService, }, }) diff --git a/packages/server/internal/api/rreference/rreference_rpc_test.go b/packages/server/internal/api/rreference/rreference_rpc_test.go index 68f3e8663..28ebaf363 100644 --- a/packages/server/internal/api/rreference/rreference_rpc_test.go +++ b/packages/server/internal/api/rreference/rreference_rpc_test.go @@ -20,6 +20,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/suser" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" @@ -62,6 +63,7 @@ func setupTestService(t *testing.T) (*ReferenceServiceRPC, context.Context, idwr nes := sflow.NewNodeExecutionService(queries) httpResponseService := shttp.NewHttpResponseService(queries) + graphqlResponseService := sgraphql.NewGraphQLResponseService(queries) svc := NewReferenceServiceRPC(ReferenceServiceRPCDeps{ DB: db, @@ -76,7 +78,8 @@ func setupTestService(t *testing.T) (*ReferenceServiceRPC, context.Context, idwr FlowVariable: fvs.Reader(), FlowEdge: edgeService.Reader(), NodeExecution: nes.Reader(), - HttpResponse: httpResponseService.Reader(), + HttpResponse: httpResponseService.Reader(), + 
GraphQLResponse: &graphqlResponseService, }, }) diff --git a/packages/server/internal/converter/converter.go b/packages/server/internal/converter/converter.go index f2aab59ee..1bb61310a 100644 --- a/packages/server/internal/converter/converter.go +++ b/packages/server/internal/converter/converter.go @@ -8,16 +8,18 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" - "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" credentialv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/credential/v1" environmentv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/environment/v1" filev1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/file_system/v1" flowv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/flow/v1" + graphqlv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/graph_q_l/v1" httpv1 "github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/http/v1" ) @@ -375,6 +377,8 @@ func ToAPINodeKind(kind mflow.NodeKind) flowv1.NodeKind { return flowv1.NodeKind_NODE_KIND_AI_PROVIDER case mflow.NODE_KIND_AI_MEMORY: return flowv1.NodeKind_NODE_KIND_AI_MEMORY + case mflow.NODE_KIND_GRAPHQL: + return flowv1.NodeKind_NODE_KIND_GRAPH_Q_L default: return flowv1.NodeKind_NODE_KIND_UNSPECIFIED } @@ -466,3 +470,14 @@ func ToAPIErrorHandling(eh mflow.ErrorHandling) flowv1.ErrorHandling { return flowv1.ErrorHandling_ERROR_HANDLING_UNSPECIFIED } } + +// ToAPIGraphQLAssert converts model GraphQLAssert to API GraphQLAssert +func 
ToAPIGraphQLAssert(assert mgraphql.GraphQLAssert) *graphqlv1.GraphQLAssert { + return &graphqlv1.GraphQLAssert{ + GraphqlAssertId: assert.ID.Bytes(), + GraphqlId: assert.GraphQLID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Order: assert.DisplayOrder, + } +} diff --git a/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go b/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go new file mode 100644 index 000000000..1b2330b99 --- /dev/null +++ b/packages/server/internal/migrations/01KHDYWX_add_graphql_tables.go @@ -0,0 +1,449 @@ +package migrations + +import ( + "context" + "database/sql" + "fmt" + "strings" + + "github.com/the-dev-tools/dev-tools/packages/server/internal/migrate" +) + +// MigrationAddGraphQLTablesID is the ULID for the GraphQL tables migration. +const MigrationAddGraphQLTablesID = "01KHDYWX1KV5MX8H9MNTPCWDV9" + +// MigrationAddGraphQLTablesChecksum is a stable hash of this migration. +const MigrationAddGraphQLTablesChecksum = "sha256:add-graphql-tables-v2" + +func init() { + if err := migrate.Register(migrate.Migration{ + ID: MigrationAddGraphQLTablesID, + Checksum: MigrationAddGraphQLTablesChecksum, + Description: "Add GraphQL tables with delta support, assertions, and response history", + Apply: applyGraphQLTables, + Validate: validateGraphQLTables, + RequiresBackup: true, + }); err != nil { + panic("failed to register GraphQL tables migration: " + err.Error()) + } +} + +func applyGraphQLTables(ctx context.Context, tx *sql.Tx) error { + // 1. 
Create graphql table (with delta columns inline) + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + folder_id BLOB, + name TEXT NOT NULL, + url TEXT NOT NULL, + query TEXT NOT NULL DEFAULT '', + variables TEXT NOT NULL DEFAULT '', + description TEXT NOT NULL DEFAULT '', + last_run_at BIGINT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + -- Delta system + parent_graphql_id BLOB DEFAULT NULL, + is_delta BOOLEAN NOT NULL DEFAULT FALSE, + is_snapshot BOOLEAN NOT NULL DEFAULT FALSE, + delta_name TEXT NULL, + delta_url TEXT NULL, + delta_query TEXT NULL, + delta_variables TEXT NULL, + delta_description TEXT NULL, + + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (folder_id) REFERENCES files (id) ON DELETE SET NULL + ) + `); err != nil { + return err + } + + graphqlIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_workspace_idx ON graphql (workspace_id)`, + `CREATE INDEX IF NOT EXISTS graphql_folder_idx ON graphql (folder_id) WHERE folder_id IS NOT NULL`, + `CREATE INDEX IF NOT EXISTS graphql_parent_delta_idx ON graphql (parent_graphql_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_delta_resolution_idx ON graphql (parent_graphql_id, is_delta, updated_at DESC)`, + `CREATE INDEX IF NOT EXISTS graphql_active_streaming_idx ON graphql (workspace_id, updated_at DESC) WHERE is_delta = FALSE`, + } + for _, idx := range graphqlIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql index: %w", err) + } + } + + // 2. 
Create graphql_version table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_version ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + version_name TEXT NOT NULL, + version_description TEXT NOT NULL DEFAULT '', + is_active BOOLEAN NOT NULL DEFAULT FALSE, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + created_by BLOB, + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE, + FOREIGN KEY (created_by) REFERENCES users (id) ON DELETE SET NULL, + CHECK (version_name != '') + ) + `); err != nil { + return err + } + + versionIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_version_graphql_idx ON graphql_version (graphql_id)`, + `CREATE INDEX IF NOT EXISTS graphql_version_active_idx ON graphql_version (is_active) WHERE is_active = TRUE`, + `CREATE INDEX IF NOT EXISTS graphql_version_created_by_idx ON graphql_version (created_by)`, + } + for _, idx := range versionIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_version index: %w", err) + } + } + + // 3. 
Create graphql_header table (with delta columns inline) + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_header ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + header_key TEXT NOT NULL, + header_value TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + enabled BOOLEAN NOT NULL DEFAULT TRUE, + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + -- Delta system + parent_graphql_header_id BLOB DEFAULT NULL, + is_delta BOOLEAN NOT NULL DEFAULT FALSE, + delta_header_key TEXT NULL, + delta_header_value TEXT NULL, + delta_description TEXT NULL, + delta_enabled BOOLEAN NULL, + delta_display_order REAL NULL, + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + headerIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_header_graphql_idx ON graphql_header (graphql_id)`, + `CREATE INDEX IF NOT EXISTS graphql_header_order_idx ON graphql_header (graphql_id, display_order)`, + `CREATE INDEX IF NOT EXISTS graphql_header_parent_delta_idx ON graphql_header (parent_graphql_header_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_header_delta_streaming_idx ON graphql_header (parent_graphql_header_id, is_delta, updated_at DESC)`, + } + for _, idx := range headerIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_header index: %w", err) + } + } + + // 4. 
Create graphql_assert table (with delta columns inline) + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_assert ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + value TEXT NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT TRUE, + description TEXT NOT NULL DEFAULT '', + display_order REAL NOT NULL DEFAULT 0, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + + -- Delta system + parent_graphql_assert_id BLOB DEFAULT NULL, + is_delta BOOLEAN NOT NULL DEFAULT FALSE, + delta_value TEXT NULL, + delta_enabled BOOLEAN NULL, + delta_description TEXT NULL, + delta_display_order REAL NULL, + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + assertIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_assert_graphql_idx ON graphql_assert (graphql_id)`, + `CREATE INDEX IF NOT EXISTS graphql_assert_order_idx ON graphql_assert (graphql_id, display_order)`, + `CREATE INDEX IF NOT EXISTS graphql_assert_parent_delta_idx ON graphql_assert (parent_graphql_assert_id, is_delta)`, + `CREATE INDEX IF NOT EXISTS graphql_assert_delta_streaming_idx ON graphql_assert (parent_graphql_assert_id, is_delta, updated_at DESC)`, + } + for _, idx := range assertIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_assert index: %w", err) + } + } + + // 5. 
Create graphql_response table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_response ( + id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + status INT32 NOT NULL, + body BLOB, + time DATETIME NOT NULL, + duration INT32 NOT NULL, + size INT32 NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + responseIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_response_graphql_idx ON graphql_response (graphql_id)`, + `CREATE INDEX IF NOT EXISTS graphql_response_time_idx ON graphql_response (graphql_id, time DESC)`, + } + for _, idx := range responseIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_response index: %w", err) + } + } + + // 6. Create graphql_response_header table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_response_header ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + key TEXT NOT NULL, + value TEXT NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + if _, err := tx.ExecContext(ctx, ` + CREATE INDEX IF NOT EXISTS graphql_response_header_response_idx ON graphql_response_header (response_id) + `); err != nil { + return err + } + + // 7. 
Create graphql_response_assert table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS graphql_response_assert ( + id BLOB NOT NULL PRIMARY KEY, + response_id BLOB NOT NULL, + value TEXT NOT NULL, + success BOOLEAN NOT NULL, + created_at BIGINT NOT NULL DEFAULT (unixepoch()), + + FOREIGN KEY (response_id) REFERENCES graphql_response (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + responseAssertIndexes := []string{ + `CREATE INDEX IF NOT EXISTS graphql_response_assert_response_idx ON graphql_response_assert (response_id)`, + `CREATE INDEX IF NOT EXISTS graphql_response_assert_success_idx ON graphql_response_assert (response_id, success)`, + } + for _, idx := range responseAssertIndexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("create graphql_response_assert index: %w", err) + } + } + + // 8. Create flow_node_graphql table + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE IF NOT EXISTS flow_node_graphql ( + flow_node_id BLOB NOT NULL PRIMARY KEY, + graphql_id BLOB NOT NULL, + FOREIGN KEY (graphql_id) REFERENCES graphql (id) ON DELETE CASCADE + ) + `); err != nil { + return err + } + + // 9. Add graphql_response_id column to node_execution table + var colCount int + err := tx.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('node_execution') + WHERE name = 'graphql_response_id' + `).Scan(&colCount) + if err != nil { + return fmt.Errorf("check node_execution column: %w", err) + } + if colCount == 0 { + if _, err := tx.ExecContext(ctx, ` + ALTER TABLE node_execution ADD COLUMN graphql_response_id BLOB + REFERENCES graphql_response (id) ON DELETE SET NULL + `); err != nil { + return err + } + } + + // 10. 
Update files table CHECK constraint to allow content_kind = 5 (graphql) + if err := updateFilesCheckConstraint(ctx, tx); err != nil { + return fmt.Errorf("update files check constraint: %w", err) + } + + return nil +} + +// updateFilesCheckConstraint recreates the files table with GraphQL content_kind support. +func updateFilesCheckConstraint(ctx context.Context, tx *sql.Tx) error { + var tableSql string + err := tx.QueryRowContext(ctx, ` + SELECT sql FROM sqlite_master WHERE type='table' AND name='files' + `).Scan(&tableSql) + if err != nil { + return fmt.Errorf("read files table schema: %w", err) + } + if strings.Contains(tableSql, "4, 5)") { + return nil + } + + if _, err := tx.ExecContext(ctx, ` + CREATE TABLE files_new ( + id BLOB NOT NULL PRIMARY KEY, + workspace_id BLOB NOT NULL, + parent_id BLOB, + content_id BLOB, + content_kind INT8 NOT NULL DEFAULT 0, + name TEXT NOT NULL, + display_order REAL NOT NULL DEFAULT 0, + path_hash TEXT, + updated_at BIGINT NOT NULL DEFAULT (unixepoch()), + CHECK (length (id) == 16), + CHECK (content_kind IN (0, 1, 2, 3, 4, 5)), + CHECK ( + (content_kind = 0 AND content_id IS NOT NULL) OR + (content_kind = 1 AND content_id IS NOT NULL) OR + (content_kind = 2 AND content_id IS NOT NULL) OR + (content_kind = 3 AND content_id IS NOT NULL) OR + (content_kind = 4 AND content_id IS NOT NULL) OR + (content_kind = 5 AND content_id IS NOT NULL) OR + (content_id IS NULL) + ), + FOREIGN KEY (workspace_id) REFERENCES workspaces (id) ON DELETE CASCADE, + FOREIGN KEY (parent_id) REFERENCES files (id) ON DELETE SET NULL + ) + `); err != nil { + return fmt.Errorf("create files_new: %w", err) + } + + if _, err := tx.ExecContext(ctx, `INSERT INTO files_new SELECT * FROM files`); err != nil { + return fmt.Errorf("copy files data: %w", err) + } + + if _, err := tx.ExecContext(ctx, `DROP TABLE files`); err != nil { + return fmt.Errorf("drop old files: %w", err) + } + + if _, err := tx.ExecContext(ctx, `ALTER TABLE files_new RENAME TO files`); err 
!= nil { + return fmt.Errorf("rename files_new: %w", err) + } + + indexes := []string{ + `CREATE INDEX files_workspace_idx ON files (workspace_id)`, + `CREATE UNIQUE INDEX files_path_hash_idx ON files (workspace_id, path_hash) WHERE path_hash IS NOT NULL`, + `CREATE INDEX files_hierarchy_idx ON files (workspace_id, parent_id, display_order)`, + `CREATE INDEX files_content_lookup_idx ON files (content_kind, content_id) WHERE content_id IS NOT NULL`, + `CREATE INDEX files_parent_lookup_idx ON files (parent_id, display_order) WHERE parent_id IS NOT NULL`, + `CREATE INDEX files_name_search_idx ON files (workspace_id, name)`, + `CREATE INDEX files_kind_filter_idx ON files (workspace_id, content_kind)`, + `CREATE INDEX files_workspace_hierarchy_idx ON files (workspace_id, parent_id, content_kind, display_order)`, + } + for _, idx := range indexes { + if _, err := tx.ExecContext(ctx, idx); err != nil { + return fmt.Errorf("recreate index: %w", err) + } + } + + return nil +} + +func validateGraphQLTables(ctx context.Context, db *sql.DB) error { + tables := []string{ + "graphql", + "graphql_version", + "graphql_header", + "graphql_assert", + "graphql_response", + "graphql_response_header", + "graphql_response_assert", + "flow_node_graphql", + } + + for _, table := range tables { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='table' AND name=? 
+ `, table).Scan(&name) + if err != nil { + return fmt.Errorf("table %s not found: %w", table, err) + } + } + + indexes := []string{ + "graphql_workspace_idx", + "graphql_folder_idx", + "graphql_parent_delta_idx", + "graphql_delta_resolution_idx", + "graphql_active_streaming_idx", + "graphql_version_graphql_idx", + "graphql_header_graphql_idx", + "graphql_header_order_idx", + "graphql_header_parent_delta_idx", + "graphql_header_delta_streaming_idx", + "graphql_assert_graphql_idx", + "graphql_assert_order_idx", + "graphql_assert_parent_delta_idx", + "graphql_assert_delta_streaming_idx", + "graphql_response_graphql_idx", + "graphql_response_time_idx", + "graphql_response_header_response_idx", + "graphql_response_assert_response_idx", + "graphql_response_assert_success_idx", + } + + for _, idx := range indexes { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='index' AND name=? + `, idx).Scan(&name) + if err != nil { + return fmt.Errorf("index %s not found: %w", idx, err) + } + } + + // Verify delta columns exist + deltaColumns := map[string][]string{ + "graphql": {"parent_graphql_id", "is_delta", "is_snapshot", "delta_name", "delta_url", "delta_query", "delta_variables", "delta_description"}, + "graphql_header": {"parent_graphql_header_id", "is_delta", "delta_header_key", "delta_header_value", "delta_description", "delta_enabled", "delta_display_order"}, + "graphql_assert": {"parent_graphql_assert_id", "is_delta", "delta_value", "delta_enabled", "delta_description", "delta_display_order"}, + } + + for table, cols := range deltaColumns { + for _, col := range cols { + var colCount int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info(?) + WHERE name = ? 
+ `, table, col).Scan(&colCount) + if err != nil { + return fmt.Errorf("check %s.%s: %w", table, col, err) + } + if colCount == 0 { + return fmt.Errorf("column %s.%s not found", table, col) + } + } + } + + return nil +} diff --git a/packages/server/internal/migrations/migrations_test.go b/packages/server/internal/migrations/migrations_test.go index c9948a0cd..00932679b 100644 --- a/packages/server/internal/migrations/migrations_test.go +++ b/packages/server/internal/migrations/migrations_test.go @@ -210,7 +210,7 @@ func TestFilesTableConstraintUpdated(t *testing.T) { t.Fatalf("failed to run migrations: %v", err) } - // Verify files table supports content_kind=4 + // Verify files table supports content_kind=5 (graphql) var tableDef string err = db.QueryRowContext(ctx, ` SELECT sql FROM sqlite_master @@ -220,9 +220,9 @@ func TestFilesTableConstraintUpdated(t *testing.T) { t.Fatalf("failed to get files table definition: %v", err) } - // Check that the constraint includes content_kind=4 - if !contains(tableDef, "content_kind IN (0, 1, 2, 3, 4)") { - t.Errorf("files table CHECK constraint doesn't include content_kind=4: %s", tableDef) + // Check that the constraint includes content_kind=5 + if !contains(tableDef, "content_kind IN (0, 1, 2, 3, 4, 5)") { + t.Errorf("files table CHECK constraint doesn't include content_kind=5: %s", tableDef) } } @@ -238,3 +238,118 @@ func containsHelper(s, substr string) bool { } return false } + +func TestGraphQLDeltaColumnsCreated(t *testing.T) { + ctx := context.Background() + + db, cleanup, err := sqlitemem.NewSQLiteMem(ctx) + if err != nil { + t.Fatalf("failed to create test db: %v", err) + } + t.Cleanup(cleanup) + + cfg := Config{ + DatabasePath: ":memory:", + DataDir: t.TempDir(), + } + if err := Run(ctx, db, cfg); err != nil { + t.Fatalf("failed to run migrations: %v", err) + } + + // Verify graphql table delta columns + graphqlColumns := []string{ + "parent_graphql_id", + "is_delta", + "is_snapshot", + "delta_name", + "delta_url", 
+ "delta_query", + "delta_variables", + "delta_description", + } + + for _, col := range graphqlColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql') + WHERE name = ? + `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql table missing column: %s", col) + } + } + + // Verify graphql_header table delta columns + headerColumns := []string{ + "parent_graphql_header_id", + "is_delta", + "delta_header_key", + "delta_header_value", + "delta_description", + "delta_enabled", + "delta_display_order", + } + + for _, col := range headerColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql_header') + WHERE name = ? + `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql_header.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql_header table missing column: %s", col) + } + } + + // Verify graphql_assert table delta columns + assertColumns := []string{ + "parent_graphql_assert_id", + "is_delta", + "delta_value", + "delta_enabled", + "delta_description", + "delta_display_order", + } + + for _, col := range assertColumns { + var count int + err := db.QueryRowContext(ctx, ` + SELECT COUNT(*) FROM pragma_table_info('graphql_assert') + WHERE name = ? 
+ `, col).Scan(&count) + if err != nil { + t.Fatalf("failed to check graphql_assert.%s: %v", col, err) + } + if count == 0 { + t.Errorf("graphql_assert table missing column: %s", col) + } + } + + // Verify delta indexes were created + indexes := []string{ + "graphql_parent_delta_idx", + "graphql_delta_resolution_idx", + "graphql_active_streaming_idx", + "graphql_header_parent_delta_idx", + "graphql_header_delta_streaming_idx", + "graphql_assert_parent_delta_idx", + "graphql_assert_delta_streaming_idx", + } + + for _, idx := range indexes { + var name string + err := db.QueryRowContext(ctx, ` + SELECT name FROM sqlite_master + WHERE type='index' AND name=? + `, idx).Scan(&name) + if err != nil { + t.Errorf("index %s not found: %v", idx, err) + } + } +} diff --git a/packages/server/pkg/delta/delta.go b/packages/server/pkg/delta/delta.go index 0bed30ba9..76c0e5528 100644 --- a/packages/server/pkg/delta/delta.go +++ b/packages/server/pkg/delta/delta.go @@ -5,6 +5,7 @@ import ( "sort" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" ) @@ -428,3 +429,206 @@ func orderAsserts(asserts []mhttp.HTTPAssert) []mhttp.HTTPAssert { return ordered } + +// GraphQL Delta Resolution + +// ResolveGraphQLInput holds the base and delta information required for GraphQL resolution. +type ResolveGraphQLInput struct { + Base, Delta mgraphql.GraphQL + BaseHeaders, DeltaHeaders []mgraphql.GraphQLHeader + BaseAsserts, DeltaAsserts []mgraphql.GraphQLAssert +} + +// ResolveGraphQLOutput holds the fully resolved GraphQL request. +type ResolveGraphQLOutput struct { + Resolved mgraphql.GraphQL + ResolvedHeaders []mgraphql.GraphQLHeader + ResolvedAsserts []mgraphql.GraphQLAssert +} + +// ResolveGraphQL merges a base GraphQL request with a delta, applying overrides +// based on the Delta System architecture (Overlay Pattern). 
+func ResolveGraphQL(input ResolveGraphQLInput) ResolveGraphQLOutput { + output := ResolveGraphQLOutput{} + + // 1. Resolve Root GraphQL Entity + output.Resolved = resolveGraphQLScalar(input.Base, input.Delta) + + // 2. Resolve Collections + output.ResolvedHeaders = resolveGraphQLHeaders(input.BaseHeaders, input.DeltaHeaders) + output.ResolvedAsserts = resolveGraphQLAsserts(input.BaseAsserts, input.DeltaAsserts) + + return output +} + +// resolveGraphQLScalar applies delta scalar overrides to the base entity. +func resolveGraphQLScalar(base, delta mgraphql.GraphQL) mgraphql.GraphQL { + resolved := base + + // Explicitly set ID to Base ID (The "Identity" remains the Base) + resolved.ID = base.ID + resolved.IsDelta = false // The resolved object is a "Live" representation + + // Apply Overrides if Delta* fields are present (non-nil) + if delta.DeltaName != nil { + resolved.Name = *delta.DeltaName + } + if delta.DeltaUrl != nil { + resolved.Url = *delta.DeltaUrl + } + if delta.DeltaQuery != nil { + resolved.Query = *delta.DeltaQuery + } + if delta.DeltaVariables != nil { + resolved.Variables = *delta.DeltaVariables + } + if delta.DeltaDescription != nil { + resolved.Description = *delta.DeltaDescription + } + + // Clear delta fields in the resolved object to avoid ambiguity + resolved.DeltaName = nil + resolved.DeltaUrl = nil + resolved.DeltaQuery = nil + resolved.DeltaVariables = nil + resolved.DeltaDescription = nil + + return resolved +} + +// resolveGraphQLHeaders resolves GraphQL Headers. 
+func resolveGraphQLHeaders(base []mgraphql.GraphQLHeader, delta []mgraphql.GraphQLHeader) []mgraphql.GraphQLHeader { + overrideMap := make(map[idwrap.IDWrap]mgraphql.GraphQLHeader) + additions := make([]mgraphql.GraphQLHeader, 0) + + for _, d := range delta { + if d.ParentGraphQLHeaderID != nil { + overrideMap[*d.ParentGraphQLHeaderID] = d + } else { + additions = append(additions, d) + } + } + + resolved := make([]mgraphql.GraphQLHeader, 0, len(base)+len(additions)) + + for _, b := range base { + if override, ok := overrideMap[b.ID]; ok { + merged := b + if override.DeltaKey != nil { + merged.Key = *override.DeltaKey + } + if override.DeltaValue != nil { + merged.Value = *override.DeltaValue + } + if override.DeltaDescription != nil { + merged.Description = *override.DeltaDescription + } + if override.DeltaEnabled != nil { + merged.Enabled = *override.DeltaEnabled + } + + merged.IsDelta = false + merged.ParentGraphQLHeaderID = nil + merged.DeltaKey = nil + merged.DeltaValue = nil + merged.DeltaDescription = nil + merged.DeltaEnabled = nil + + resolved = append(resolved, merged) + } else { + resolved = append(resolved, b) + } + } + + for _, a := range additions { + item := a + item.IsDelta = false + resolved = append(resolved, item) + } + + return resolved +} + +// resolveGraphQLAsserts resolves GraphQL Asserts using specific ordering logic. +func resolveGraphQLAsserts(base, delta []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + // 1. Order the inputs first to ensure we process them in the correct logical order + orderedBase := orderGraphQLAsserts(base) + if len(delta) == 0 { + return orderedBase + } + orderedDelta := orderGraphQLAsserts(delta) + + // 2. Map Base items + baseMap := make(map[idwrap.IDWrap]mgraphql.GraphQLAssert, len(orderedBase)) + baseOrder := make([]idwrap.IDWrap, 0, len(orderedBase)) + for _, assert := range orderedBase { + baseMap[assert.ID] = assert + baseOrder = append(baseOrder, assert.ID) + } + + // 3. 
Process Deltas (Overrides and Additions) + additions := make([]mgraphql.GraphQLAssert, 0) + for _, d := range orderedDelta { + if d.ParentGraphQLAssertID != nil { + if b, exists := baseMap[*d.ParentGraphQLAssertID]; exists { + // Apply Overrides + merged := b + if d.DeltaValue != nil { + merged.Value = *d.DeltaValue + } + if d.DeltaDescription != nil { + merged.Description = *d.DeltaDescription + } + if d.DeltaEnabled != nil { + merged.Enabled = *d.DeltaEnabled + } + + merged.IsDelta = false + merged.ParentGraphQLAssertID = nil + merged.DeltaValue = nil + merged.DeltaDescription = nil + merged.DeltaEnabled = nil + + baseMap[*d.ParentGraphQLAssertID] = merged + } + } else { + // New Addition + item := d + item.IsDelta = false + additions = append(additions, item) + } + } + + // 4. Reconstruct the list + merged := make([]mgraphql.GraphQLAssert, 0, len(baseMap)+len(additions)) + + // Add base items (which may be merged/updated) in original order + for _, id := range baseOrder { + if assert, exists := baseMap[id]; exists { + merged = append(merged, assert) + } + } + + // Append additions (ensure they are also ordered relative to each other if possible) + if len(additions) > 0 { + merged = append(merged, orderGraphQLAsserts(additions)...) + } + + return merged +} + +// orderGraphQLAsserts orders asserts by DisplayOrder field. +func orderGraphQLAsserts(asserts []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + if len(asserts) <= 1 { + return append([]mgraphql.GraphQLAssert(nil), asserts...) 
+ } + + // Create a copy and sort by DisplayOrder field + ordered := make([]mgraphql.GraphQLAssert, len(asserts)) + copy(ordered, asserts) + sort.Slice(ordered, func(i, j int) bool { + return ordered[i].DisplayOrder < ordered[j].DisplayOrder + }) + + return ordered +} diff --git a/packages/server/pkg/flow/flowbuilder/builder.go b/packages/server/pkg/flow/flowbuilder/builder.go index 0c76a4f0f..018b4e809 100644 --- a/packages/server/pkg/flow/flowbuilder/builder.go +++ b/packages/server/pkg/flow/flowbuilder/builder.go @@ -13,12 +13,14 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nai" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nfor" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nforeach" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nif" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/njs" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nmemory" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/naiprovider" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nstart" + gqlresolver "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/http/resolver" "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" @@ -27,6 +29,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/scredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" 
"github.com/the-dev-tools/dev-tools/packages/spec/dist/buf/go/api/private/node_js_executor/v1/node_js_executorv1connect" ) @@ -41,12 +44,16 @@ type Builder struct { NodeAI *sflow.NodeAIService NodeAiProvider *sflow.NodeAiProviderService NodeMemory *sflow.NodeMemoryService + NodeGraphQL *sflow.NodeGraphQLService + GraphQL *sgraphql.GraphQLService + GraphQLHeader *sgraphql.GraphQLHeaderService Workspace *sworkspace.WorkspaceService Variable *senv.VariableService FlowVariable *sflow.FlowVariableService Resolver resolver.RequestResolver + GraphQLResolver gqlresolver.GraphQLResolver Logger *slog.Logger LLMProviderFactory *scredential.LLMProviderFactory } @@ -61,10 +68,14 @@ func New( nais *sflow.NodeAIService, naps *sflow.NodeAiProviderService, nmems *sflow.NodeMemoryService, + ngqs *sflow.NodeGraphQLService, + gqls *sgraphql.GraphQLService, + gqlhs *sgraphql.GraphQLHeaderService, ws *sworkspace.WorkspaceService, vs *senv.VariableService, fvs *sflow.FlowVariableService, resolver resolver.RequestResolver, + graphQLResolver gqlresolver.GraphQLResolver, logger *slog.Logger, llmFactory *scredential.LLMProviderFactory, ) *Builder { @@ -78,10 +89,14 @@ func New( NodeAI: nais, NodeAiProvider: naps, NodeMemory: nmems, + NodeGraphQL: ngqs, + GraphQL: gqls, + GraphQLHeader: gqlhs, Workspace: ws, Variable: vs, FlowVariable: fvs, Resolver: resolver, + GraphQLResolver: graphQLResolver, Logger: logger, LLMProviderFactory: llmFactory, } @@ -94,6 +109,7 @@ func (b *Builder) BuildNodes( timeout time.Duration, httpClient httpclient.HttpClient, respChan chan nrequest.NodeRequestSideResp, + gqlRespChan chan ngraphql.NodeGraphQLSideResp, jsClient node_js_executorv1connect.NodeJsExecutorServiceClient, ) (map[idwrap.IDWrap]node.FlowNode, idwrap.IDWrap, error) { flowNodeMap := make(map[idwrap.IDWrap]node.FlowNode, len(nodes)) @@ -264,6 +280,31 @@ func (b *Builder) BuildNodes( memoryCfg.WindowSize, ) } + case mflow.NODE_KIND_GRAPHQL: + gqlCfg, err := b.NodeGraphQL.GetNodeGraphQL(ctx, 
nodeModel.ID) + if err != nil { + return nil, idwrap.IDWrap{}, err + } + if gqlCfg == nil || gqlCfg.GraphQLID == nil || isZeroID(*gqlCfg.GraphQLID) { + return nil, idwrap.IDWrap{}, fmt.Errorf("graphql node %s missing graphql configuration", nodeModel.ID.String()) + } + + // Resolve GraphQL entity with delta + resolved, err := b.GraphQLResolver.Resolve(ctx, *gqlCfg.GraphQLID, gqlCfg.DeltaGraphQLID) + if err != nil { + return nil, idwrap.IDWrap{}, fmt.Errorf("resolve graphql %s: %w", gqlCfg.GraphQLID.String(), err) + } + + flowNodeMap[nodeModel.ID] = ngraphql.New( + nodeModel.ID, + nodeModel.Name, + resolved.Resolved, + resolved.ResolvedHeaders, + resolved.ResolvedAsserts, + httpClient, + gqlRespChan, + b.Logger, + ) default: return nil, idwrap.IDWrap{}, fmt.Errorf("node kind %d not supported", nodeModel.NodeKind) } diff --git a/packages/server/pkg/flow/node/ngraphql/ngraphql.go b/packages/server/pkg/flow/node/ngraphql/ngraphql.go new file mode 100644 index 000000000..201518b54 --- /dev/null +++ b/packages/server/pkg/flow/node/ngraphql/ngraphql.go @@ -0,0 +1,379 @@ +//nolint:revive // exported +package ngraphql + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log/slog" + "net/http" + "time" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node" + graphqlresponse "github.com/the-dev-tools/dev-tools/packages/server/pkg/graphql/response" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/httpclient" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type NodeGraphQL struct { + FlowNodeID idwrap.IDWrap + Name string + + GraphQL mgraphql.GraphQL + Headers []mgraphql.GraphQLHeader + Asserts []mgraphql.GraphQLAssert + HttpClient httpclient.HttpClient + SideRespChan chan NodeGraphQLSideResp + logger 
*slog.Logger +} + +type NodeGraphQLSideResp struct { + ExecutionID idwrap.IDWrap + GraphQL mgraphql.GraphQL + Headers []mgraphql.GraphQLHeader + Response mgraphql.GraphQLResponse + RespHeaders []mgraphql.GraphQLResponseHeader + RespAsserts []mgraphql.GraphQLResponseAssert + Done chan struct{} +} + +const ( + outputResponseName = "response" + outputRequestName = "request" +) + +type graphqlRequestBody struct { + Query string `json:"query"` + Variables json.RawMessage `json:"variables,omitempty"` +} + +func New( + id idwrap.IDWrap, + name string, + gql mgraphql.GraphQL, + headers []mgraphql.GraphQLHeader, + asserts []mgraphql.GraphQLAssert, + httpClient httpclient.HttpClient, + sideRespChan chan NodeGraphQLSideResp, + logger *slog.Logger, +) *NodeGraphQL { + return &NodeGraphQL{ + FlowNodeID: id, + Name: name, + GraphQL: gql, + Headers: headers, + Asserts: asserts, + HttpClient: httpClient, + SideRespChan: sideRespChan, + logger: logger, + } +} + +func (n *NodeGraphQL) GetID() idwrap.IDWrap { + return n.FlowNodeID +} + +func (n *NodeGraphQL) SetID(id idwrap.IDWrap) { + n.FlowNodeID = id +} + +func (n *NodeGraphQL) GetName() string { + return n.Name +} + +func (n *NodeGraphQL) RunSync(ctx context.Context, req *node.FlowNodeRequest) node.FlowNodeResult { + nextID := mflow.GetNextNodeID(req.EdgeSourceMap, n.GetID(), mflow.HandleUnspecified) + result := node.FlowNodeResult{ + NextNodeID: nextID, + Err: nil, + } + + varMapCopy := node.DeepCopyVarMap(req) + + // Build unified environment for interpolation + env := expression.NewUnifiedEnv(varMapCopy) + + // Track input variable reads if tracker is available + readVars := make(map[string]any) + + // Helper to interpolate and collect reads (same pattern as HTTP REQUEST nodes) + interpolate := func(raw string) (string, error) { + if !expression.HasVars(raw) { + return raw, nil + } + result, err := env.InterpolateWithResult(raw) + if err != nil { + return "", err + } + // Collect tracked reads + for k, v := range 
result.ReadVars { + readVars[k] = v + } + return result.Value, nil + } + + // Interpolate URL, query, variables, and headers + var err error + url, err := interpolate(n.GraphQL.Url) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate url: %w", err) + return result + } + + query, err := interpolate(n.GraphQL.Query) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate query: %w", err) + return result + } + + variables, err := interpolate(n.GraphQL.Variables) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate variables: %w", err) + return result + } + + // Build request body + var varsJSON json.RawMessage + if variables != "" { + // Try to parse as JSON; if invalid, use as string + if json.Valid([]byte(variables)) { + varsJSON = json.RawMessage(variables) + } else { + // Wrap as JSON string + b, _ := json.Marshal(variables) + varsJSON = b + } + } + + body := graphqlRequestBody{ + Query: query, + Variables: varsJSON, + } + bodyBytes, err := json.Marshal(body) + if err != nil { + result.Err = fmt.Errorf("failed to marshal graphql request body: %w", err) + return result + } + + // Build HTTP request + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(bodyBytes)) + if err != nil { + result.Err = fmt.Errorf("failed to create graphql http request: %w", err) + return result + } + httpReq.Header.Set("Content-Type", "application/json") + + // Apply headers with tracking + for _, h := range n.Headers { + if h.Enabled && h.Key != "" { + key, err := interpolate(h.Key) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate header key: %w", err) + return result + } + value, err := interpolate(h.Value) + if err != nil { + result.Err = fmt.Errorf("failed to interpolate header value: %w", err) + return result + } + httpReq.Header.Set(key, value) + } + } + + // Track variable reads if tracker is available (before HTTP execution) + if req.VariableTracker != nil { + for varKey, varValue := range 
readVars { + req.VariableTracker.TrackRead(varKey, varValue) + } + } + + if ctx.Err() != nil { + return result + } + + // Execute request + startTime := time.Now() + httpResp, err := n.HttpClient.Do(httpReq) + duration := time.Since(startTime) + if err != nil { + result.Err = fmt.Errorf("graphql request failed: %w", err) + return result + } + defer func() { _ = httpResp.Body.Close() }() + + // Read response body + respBody, err := io.ReadAll(httpResp.Body) + if err != nil { + result.Err = fmt.Errorf("failed to read graphql response body: %w", err) + return result + } + + if ctx.Err() != nil { + return result + } + + // Build response headers + respHeaderModels := make([]mgraphql.GraphQLResponseHeader, 0) + for key, values := range httpResp.Header { + for _, value := range values { + respHeaderModels = append(respHeaderModels, mgraphql.GraphQLResponseHeader{ + ID: idwrap.NewNow(), + HeaderKey: key, + HeaderValue: value, + }) + } + } + + // Build output map + var respBodyParsed any + if err := json.Unmarshal(respBody, &respBodyParsed); err != nil { + // If not valid JSON, use as string + respBodyParsed = string(respBody) + } + + requestHeaders := make(map[string]any) + for _, h := range n.Headers { + if h.Enabled && h.Key != "" { + requestHeaders[h.Key] = h.Value + } + } + + respHeaders := make(map[string]any) + for key, values := range httpResp.Header { + if len(values) == 1 { + respHeaders[key] = values[0] + } else { + anyValues := make([]any, len(values)) + for i, v := range values { + anyValues[i] = v + } + respHeaders[key] = anyValues + } + } + + outputMap := map[string]any{ + outputRequestName: map[string]any{ + "url": url, + "query": query, + "variables": variables, + "headers": requestHeaders, + }, + outputResponseName: map[string]any{ + "status": float64(httpResp.StatusCode), + "body": respBodyParsed, + "headers": respHeaders, + "duration": float64(duration.Milliseconds()), + }, + } + + // Use tracking version if tracker is available (same pattern as HTTP 
REQUEST nodes) + if req.VariableTracker != nil { + if err := node.WriteNodeVarBulkWithTracking(req, n.Name, outputMap, req.VariableTracker); err != nil { + result.Err = err + return result + } + } else { + if err := node.WriteNodeVarBulk(req, n.Name, outputMap); err != nil { + result.Err = err + return result + } + } + + // Create response with assertions evaluated using UnifiedEnv (same pattern as HTTP) + respCreate, err := graphqlresponse.ResponseCreateGraphQL( + ctx, + respBody, + httpResp.StatusCode, + duration, + respHeaderModels, + n.GraphQL.ID, + n.Asserts, + varMapCopy, + ) + if err != nil { + result.Err = err + return result + } + + result.AuxiliaryID = &respCreate.GraphQLResponse.ID + + // Check if any assertions failed (same pattern as HTTP) + done := make(chan struct{}) + for _, assertRes := range respCreate.ResponseAsserts { + if !assertRes.Success { + result.Err = fmt.Errorf("assertion failed: %s", assertRes.Value) + + // Still send the response data even though we're failing + n.SideRespChan <- NodeGraphQLSideResp{ + ExecutionID: req.ExecutionID, + GraphQL: n.GraphQL, + Headers: n.Headers, + Response: respCreate.GraphQLResponse, + RespHeaders: respCreate.ResponseHeaders, + RespAsserts: respCreate.ResponseAsserts, + Done: done, + } + select { + case <-done: + case <-ctx.Done(): + } + return result + } + } + + // Send through side channel for persistence + n.SideRespChan <- NodeGraphQLSideResp{ + ExecutionID: req.ExecutionID, + GraphQL: n.GraphQL, + Headers: n.Headers, + Response: respCreate.GraphQLResponse, + RespHeaders: respCreate.ResponseHeaders, + RespAsserts: respCreate.ResponseAsserts, + Done: done, + } + select { + case <-done: + case <-ctx.Done(): + } + + return result +} + +func (n *NodeGraphQL) RunAsync(ctx context.Context, req *node.FlowNodeRequest, resultChan chan node.FlowNodeResult) { + result := n.RunSync(ctx, req) + if ctx.Err() != nil { + return + } + resultChan <- result +} + +// GetRequiredVariables implements 
node.VariableIntrospector. +func (n *NodeGraphQL) GetRequiredVariables() []string { + var sources []string + sources = append(sources, n.GraphQL.Url, n.GraphQL.Query, n.GraphQL.Variables) + for _, h := range n.Headers { + if h.Enabled { + sources = append(sources, h.Key, h.Value) + } + } + return expression.ExtractVarKeysFromMultiple(sources...) +} + +// GetOutputVariables implements node.VariableIntrospector. +func (n *NodeGraphQL) GetOutputVariables() []string { + return []string{ + "response.status", + "response.body", + "response.headers", + "response.duration", + "request.url", + "request.query", + "request.variables", + "request.headers", + } +} diff --git a/packages/server/pkg/graphql/resolver/resolver.go b/packages/server/pkg/graphql/resolver/resolver.go new file mode 100644 index 000000000..14400be92 --- /dev/null +++ b/packages/server/pkg/graphql/resolver/resolver.go @@ -0,0 +1,127 @@ +//nolint:revive // exported +package resolver + +import ( + "context" + "sort" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/delta" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLResolver defines the interface for resolving GraphQL requests with their delta overlays. +type GraphQLResolver interface { + Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) +} + +// StandardResolver implements GraphQLResolver using standard DB services. +type StandardResolver struct { + graphqlService *sgraphql.Reader + graphqlHeaderService *sgraphql.GraphQLHeaderService + graphqlAssertService *sgraphql.GraphQLAssertService +} + +// NewStandardResolver creates a new instance of StandardResolver. 
+func NewStandardResolver( + graphqlService *sgraphql.Reader, + graphqlHeaderService *sgraphql.GraphQLHeaderService, + graphqlAssertService *sgraphql.GraphQLAssertService, +) *StandardResolver { + return &StandardResolver{ + graphqlService: graphqlService, + graphqlHeaderService: graphqlHeaderService, + graphqlAssertService: graphqlAssertService, + } +} + +// Resolve fetches base and delta components and resolves them into a final GraphQL request. +func (r *StandardResolver) Resolve(ctx context.Context, baseID idwrap.IDWrap, deltaID *idwrap.IDWrap) (*delta.ResolveGraphQLOutput, error) { + // 1. Fetch Base Components + baseGraphQL, err := r.graphqlService.Get(ctx, baseID) + if err != nil { + return nil, err + } + + baseHeaders, _ := r.graphqlHeaderService.GetByGraphQLID(ctx, baseID) + baseAsserts, _ := r.graphqlAssertService.GetByGraphQLID(ctx, baseID) + + // 2. Fetch Delta Components (if present) + var deltaGraphQL *mgraphql.GraphQL + var deltaHeaders []mgraphql.GraphQLHeader + var deltaAsserts []mgraphql.GraphQLAssert + + if deltaID != nil { + d, err := r.graphqlService.Get(ctx, *deltaID) + if err != nil { + return nil, err + } + deltaGraphQL = d + + deltaHeaders, _ = r.graphqlHeaderService.GetByGraphQLID(ctx, *deltaID) + deltaAsserts, _ = r.graphqlAssertService.GetByGraphQLID(ctx, *deltaID) + } + + // 3. Prepare Input for Delta Resolution + input := delta.ResolveGraphQLInput{ + Base: *baseGraphQL, + BaseHeaders: convertGraphQLHeaders(baseHeaders), + BaseAsserts: convertGraphQLAsserts(baseAsserts), + } + + if deltaGraphQL != nil { + input.Delta = *deltaGraphQL + input.DeltaHeaders = convertGraphQLHeaders(deltaHeaders) + input.DeltaAsserts = convertGraphQLAsserts(deltaAsserts) + } + + // 4. 
Resolve + output := delta.ResolveGraphQL(input) + return &output, nil +} + +// Helper functions for type conversion + +func convertGraphQLHeaders(in []mgraphql.GraphQLHeader) []mgraphql.GraphQLHeader { + if in == nil { + return []mgraphql.GraphQLHeader{} + } + out := make([]mgraphql.GraphQLHeader, len(in)) + for i, v := range in { + out[i] = mgraphql.GraphQLHeader{ + ID: v.ID, + GraphQLID: v.GraphQLID, + Key: v.Key, + Value: v.Value, + Description: v.Description, + Enabled: v.Enabled, + ParentGraphQLHeaderID: v.ParentGraphQLHeaderID, + IsDelta: v.IsDelta, + DeltaKey: v.DeltaKey, + DeltaValue: v.DeltaValue, + DeltaDescription: v.DeltaDescription, + DeltaEnabled: v.DeltaEnabled, + DisplayOrder: v.DisplayOrder, + CreatedAt: v.CreatedAt, + UpdatedAt: v.UpdatedAt, + } + } + return out +} + +// convertGraphQLAsserts converts DB model asserts (ordered by float) to mgraphql model asserts. +func convertGraphQLAsserts(in []mgraphql.GraphQLAssert) []mgraphql.GraphQLAssert { + if len(in) == 0 { + return []mgraphql.GraphQLAssert{} + } + + // Sort by DisplayOrder (DB model uses float ordering) + sorted := make([]mgraphql.GraphQLAssert, len(in)) + copy(sorted, in) + sort.Slice(sorted, func(i, j int) bool { + return sorted[i].DisplayOrder < sorted[j].DisplayOrder + }) + + return sorted +} diff --git a/packages/server/pkg/graphql/response/response.go b/packages/server/pkg/graphql/response/response.go new file mode 100644 index 000000000..cd7b10554 --- /dev/null +++ b/packages/server/pkg/graphql/response/response.go @@ -0,0 +1,194 @@ +//nolint:revive // exported +package response + +import ( + "context" + "encoding/json" + "fmt" + "sort" + "strings" + "time" + + "connectrpc.com/connect" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/expression" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type ResponseCreateGraphQLOutput struct { + GraphQLResponse mgraphql.GraphQLResponse 
+ ResponseHeaders []mgraphql.GraphQLResponseHeader + ResponseAsserts []mgraphql.GraphQLResponseAssert +} + +func ResponseCreateGraphQL( + ctx context.Context, + respBody []byte, + statusCode int, + duration time.Duration, + headers []mgraphql.GraphQLResponseHeader, + graphqlID idwrap.IDWrap, + assertions []mgraphql.GraphQLAssert, + flowVars map[string]any, +) (*ResponseCreateGraphQLOutput, error) { + responseID := idwrap.NewNow() + now := time.Now().Unix() + + // Create response model + graphqlResponse := mgraphql.GraphQLResponse{ + ID: responseID, + GraphQLID: graphqlID, + Status: int32(statusCode), //nolint:gosec // G115: HTTP status codes are small + Body: respBody, + Time: now, + Duration: int32(duration.Milliseconds()), //nolint:gosec // G115: duration in ms fits int32 + Size: int32(len(respBody)), //nolint:gosec // G115: response body size fits int32 + CreatedAt: now, + } + + // Set response ID on headers + responseHeaders := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + responseHeaders[i] = h + responseHeaders[i].ResponseID = responseID + responseHeaders[i].CreatedAt = now + } + + // Parse response body as JSON (similar to HTTP) + var respBodyParsed any + if err := json.Unmarshal(respBody, &respBodyParsed); err != nil { + respBodyParsed = string(respBody) + } + + // Build response variable (similar to HTTP's ConvertResponseToVar) + responseVar := map[string]any{ + "status": float64(statusCode), + "body": respBodyParsed, + "headers": convertHeadersToMap(headers), + "duration": float64(duration.Milliseconds()), + } + + // Build unified environment with flowVars and response binding + // For GraphQL, also extract "data" and "errors" fields to top level for easier access + evalEnvMap := buildAssertionEnv(flowVars, responseVar, respBodyParsed) + env := expression.NewUnifiedEnv(evalEnvMap) + + responseAsserts := make([]mgraphql.GraphQLResponseAssert, 0) + + // Evaluate assertions (SAME pattern as HTTP) + for _, assertion := 
range assertions { + if assertion.Enabled { + expr := assertion.Value + + // Skip assertions with empty expressions + if strings.TrimSpace(expr) == "" { + continue + } + + // If expression contains {{ }}, interpolate first + evaluatedExpr := expr + if expression.HasVars(expr) { + interpolated, err := env.Interpolate(expr) + if err != nil { + return nil, err + } + evaluatedExpr = interpolated + } + + // Evaluate as boolean expression + ok, err := env.EvalBool(ctx, evaluatedExpr) + if err != nil { + annotatedErr := annotateUnknownNameError(err, evalEnvMap) + return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("expression %q failed: %w", evaluatedExpr, annotatedErr)) + } + + responseAsserts = append(responseAsserts, mgraphql.GraphQLResponseAssert{ + ID: idwrap.NewNow(), + ResponseID: responseID, + Value: evaluatedExpr, + Success: ok, + CreatedAt: now, + }) + } + } + + return &ResponseCreateGraphQLOutput{ + GraphQLResponse: graphqlResponse, + ResponseHeaders: responseHeaders, + ResponseAsserts: responseAsserts, + }, nil +} + +func buildAssertionEnv(flowVars map[string]any, responseBinding map[string]any, respBodyParsed any) map[string]any { + env := make(map[string]any) + + // Add flow variables first + for k, v := range flowVars { + env[k] = v + } + + // Add response binding for backward compatibility + env["response"] = responseBinding + + // Extract GraphQL-specific fields from response body (matching GraphQL tab behavior) + var data any + var errors any + if bodyMap, ok := respBodyParsed.(map[string]any); ok { + if d, hasData := bodyMap["data"]; hasData { + data = d + } + if e, hasErrors := bodyMap["errors"]; hasErrors { + errors = e + } + } + + // Add GraphQL-specific fields at top level for easier access (matching GraphQL tab behavior) + // This allows assertions like: data.users[0].id == "1" + env["data"] = data + env["errors"] = errors + + return env +} + +func convertHeadersToMap(headers []mgraphql.GraphQLResponseHeader) map[string]any { + headersMap 
:= make(map[string]any) + for _, h := range headers { + if existing, ok := headersMap[h.HeaderKey]; ok { + // Multiple values for same key - convert to array + if arr, isArr := existing.([]any); isArr { + headersMap[h.HeaderKey] = append(arr, h.HeaderValue) + } else { + headersMap[h.HeaderKey] = []any{existing, h.HeaderValue} + } + } else { + headersMap[h.HeaderKey] = h.HeaderValue + } + } + return headersMap +} + +func annotateUnknownNameError(err error, env map[string]any) error { + if err == nil { + return nil + } + lower := strings.ToLower(err.Error()) + if strings.Contains(lower, "unknown name") { + keys := collectEnvKeys(env) + if len(keys) > 0 { + return fmt.Errorf("%w (available variables: %s)", err, strings.Join(keys, ", ")) + } + } + return err +} + +func collectEnvKeys(env map[string]any) []string { + if len(env) == 0 { + return nil + } + keys := make([]string, 0, len(env)) + for k := range env { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/packages/server/pkg/ioworkspace/exporter.go b/packages/server/pkg/ioworkspace/exporter.go index 50fc8d8ac..014ac5fa5 100644 --- a/packages/server/pkg/ioworkspace/exporter.go +++ b/packages/server/pkg/ioworkspace/exporter.go @@ -13,6 +13,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sworkspace" ) @@ -53,6 +54,10 @@ func (s *IOWorkspaceService) Export(ctx context.Context, opts ExportOptions) (*W if err := s.exportHTTP(ctx, opts, bundle); err != nil { return nil, fmt.Errorf("failed to export HTTP requests: %w", err) } + + if err := s.exportGraphQL(ctx, opts, bundle); err != nil { + return nil, 
fmt.Errorf("failed to export GraphQL requests: %w", err) + } } // Export flows if requested @@ -222,6 +227,7 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions nodeAIService := sflow.NewNodeAIService(s.queries) nodeAIProviderService := sflow.NewNodeAiProviderService(s.queries) nodeMemoryService := sflow.NewNodeMemoryService(s.queries) + nodeGraphQLService := sflow.NewNodeGraphQLService(s.queries) var flowIDs []idwrap.IDWrap @@ -275,7 +281,7 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions // Export node implementations based on node types for _, node := range nodes { - if err := s.exportNodeImplementation(ctx, node, bundle, nodeRequestService, nodeIfService, nodeForService, nodeForEachService, nodeJSService, nodeAIService, nodeAIProviderService, nodeMemoryService); err != nil { + if err := s.exportNodeImplementation(ctx, node, bundle, nodeRequestService, nodeIfService, nodeForService, nodeForEachService, nodeJSService, nodeAIService, nodeAIProviderService, nodeMemoryService, nodeGraphQLService); err != nil { return fmt.Errorf("failed to export node implementation for node %s: %w", node.ID.String(), err) } } @@ -292,7 +298,42 @@ func (s *IOWorkspaceService) exportFlows(ctx context.Context, opts ExportOptions "js_nodes", len(bundle.FlowJSNodes), "ai_nodes", len(bundle.FlowAINodes), "ai_provider_nodes", len(bundle.FlowAIProviderNodes), - "ai_memory_nodes", len(bundle.FlowAIMemoryNodes)) + "ai_memory_nodes", len(bundle.FlowAIMemoryNodes), + "graphql_nodes", len(bundle.FlowGraphQLNodes)) + + return nil +} + +// exportGraphQL exports GraphQL requests and their headers and assertions +func (s *IOWorkspaceService) exportGraphQL(ctx context.Context, opts ExportOptions, bundle *WorkspaceBundle) error { + graphqlService := sgraphql.New(s.queries, s.logger) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(s.queries) + graphqlAssertService := sgraphql.NewGraphQLAssertService(s.queries) + + 
gqlRequests, err := graphqlService.GetByWorkspaceID(ctx, opts.WorkspaceID) + if err != nil { + return fmt.Errorf("failed to get GraphQL requests: %w", err) + } + bundle.GraphQLRequests = gqlRequests + + for _, gql := range gqlRequests { + headers, err := graphqlHeaderService.GetByGraphQLID(ctx, gql.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return fmt.Errorf("failed to get headers for GraphQL %s: %w", gql.ID.String(), err) + } + bundle.GraphQLHeaders = append(bundle.GraphQLHeaders, headers...) + + asserts, err := graphqlAssertService.GetByGraphQLID(ctx, gql.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return fmt.Errorf("failed to get asserts for GraphQL %s: %w", gql.ID.String(), err) + } + bundle.GraphQLAsserts = append(bundle.GraphQLAsserts, asserts...) + } + + s.logger.DebugContext(ctx, "Exported GraphQL requests", + "count", len(bundle.GraphQLRequests), + "headers", len(bundle.GraphQLHeaders), + "asserts", len(bundle.GraphQLAsserts)) return nil } @@ -310,6 +351,7 @@ func (s *IOWorkspaceService) exportNodeImplementation( nodeAIService sflow.NodeAIService, nodeAIProviderService sflow.NodeAiProviderService, nodeMemoryService sflow.NodeMemoryService, + nodeGraphQLService sflow.NodeGraphQLService, ) error { switch node.NodeKind { case mflow.NODE_KIND_REQUEST: @@ -383,6 +425,15 @@ func (s *IOWorkspaceService) exportNodeImplementation( if nodeMemory != nil { bundle.FlowAIMemoryNodes = append(bundle.FlowAIMemoryNodes, *nodeMemory) } + + case mflow.NODE_KIND_GRAPHQL: + nodeGraphQL, err := nodeGraphQLService.GetNodeGraphQL(ctx, node.ID) + if err != nil { + return fmt.Errorf("failed to get graphql node: %w", err) + } + if nodeGraphQL != nil { + bundle.FlowGraphQLNodes = append(bundle.FlowGraphQLNodes, *nodeGraphQL) + } } return nil diff --git a/packages/server/pkg/ioworkspace/importer.go b/packages/server/pkg/ioworkspace/importer.go index e8cba2e3a..e64d3360e 100644 --- a/packages/server/pkg/ioworkspace/importer.go +++ 
b/packages/server/pkg/ioworkspace/importer.go @@ -9,6 +9,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/senv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/shttp" ) @@ -35,6 +36,10 @@ type ImportResult struct { FlowAINodesCreated int FlowAIProviderNodesCreated int FlowAIMemoryNodesCreated int + FlowGraphQLNodesCreated int + GraphQLRequestsCreated int + GraphQLHeadersCreated int + GraphQLAssertsCreated int EnvironmentsCreated int EnvironmentVarsCreated int @@ -85,6 +90,11 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor nodeAIService := sflow.NewNodeAIService(s.queries).TX(tx) nodeAIProviderService := sflow.NewNodeAiProviderService(s.queries).TX(tx) nodeMemoryService := sflow.NewNodeMemoryService(s.queries).TX(tx) + nodeGraphQLService := sflow.NewNodeGraphQLService(s.queries).TX(tx) + + graphqlService := sgraphql.New(s.queries, nil).TX(tx) + graphqlHeaderService := sgraphql.NewGraphQLHeaderService(s.queries).TX(tx) + graphqlAssertService := sgraphql.NewGraphQLAssertService(s.queries).TX(tx) fileService := sfile.New(s.queries, nil).TX(tx) envService := senv.NewEnvironmentService(s.queries, nil).TX(tx) @@ -104,6 +114,12 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor } } + if opts.ImportHTTP && len(bundle.GraphQLRequests) > 0 { + if err := s.importGraphQLRequests(ctx, graphqlService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import GraphQL requests: %w", err) + } + } + if opts.CreateFiles && len(bundle.Files) > 0 { if err := s.importFiles(ctx, fileService, bundle, opts, result); err != nil { return nil, fmt.Errorf("failed to import files: %w", err) @@ -131,6 +147,18 @@ func (s *IOWorkspaceService) 
Import(ctx context.Context, tx *sql.Tx, bundle *Wor } } + if opts.ImportHTTP && len(bundle.GraphQLHeaders) > 0 { + if err := s.importGraphQLHeaders(ctx, graphqlHeaderService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import GraphQL headers: %w", err) + } + } + + if opts.ImportHTTP && len(bundle.GraphQLAsserts) > 0 { + if err := s.importGraphQLAsserts(ctx, graphqlAssertService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import GraphQL asserts: %w", err) + } + } + if opts.ImportHTTP { if len(bundle.HTTPHeaders) > 0 { if err := s.importHTTPHeaders(ctx, httpHeaderService, bundle, opts, result); err != nil { @@ -231,6 +259,12 @@ func (s *IOWorkspaceService) Import(ctx context.Context, tx *sql.Tx, bundle *Wor return nil, fmt.Errorf("failed to import flow AI memory nodes: %w", err) } } + + if len(bundle.FlowGraphQLNodes) > 0 { + if err := s.importFlowGraphQLNodes(ctx, nodeGraphQLService, bundle, opts, result); err != nil { + return nil, fmt.Errorf("failed to import flow GraphQL nodes: %w", err) + } + } } return result, nil diff --git a/packages/server/pkg/ioworkspace/importer_flow.go b/packages/server/pkg/ioworkspace/importer_flow.go index 12cec53d8..c8602a7a7 100644 --- a/packages/server/pkg/ioworkspace/importer_flow.go +++ b/packages/server/pkg/ioworkspace/importer_flow.go @@ -7,6 +7,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" ) // importFlows imports flows from the bundle. @@ -279,3 +280,78 @@ func (s *IOWorkspaceService) importFlowAIMemoryNodes(ctx context.Context, nodeMe } return nil } + +// importGraphQLRequests imports GraphQL requests from the bundle. 
+func (s *IOWorkspaceService) importGraphQLRequests(ctx context.Context, graphqlService sgraphql.GraphQLService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, gql := range bundle.GraphQLRequests { + // Generate new ID if not preserving + if !opts.PreserveIDs { + gql.ID = idwrap.NewNow() + } + + // Update workspace ID + gql.WorkspaceID = opts.WorkspaceID + + // Create GraphQL request + if err := graphqlService.Create(ctx, &gql); err != nil { + return fmt.Errorf("failed to create GraphQL request %s: %w", gql.Name, err) + } + + result.GraphQLRequestsCreated++ + } + return nil +} + +// importGraphQLHeaders imports GraphQL headers from the bundle. +func (s *IOWorkspaceService) importGraphQLHeaders(ctx context.Context, graphqlHeaderService sgraphql.GraphQLHeaderService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, header := range bundle.GraphQLHeaders { + // Generate new ID if not preserving + if !opts.PreserveIDs { + header.ID = idwrap.NewNow() + } + + // Create header + if err := graphqlHeaderService.Create(ctx, &header); err != nil { + return fmt.Errorf("failed to create GraphQL header: %w", err) + } + + result.GraphQLHeadersCreated++ + } + return nil +} + +// importGraphQLAsserts imports GraphQL assertions from the bundle. +func (s *IOWorkspaceService) importGraphQLAsserts(ctx context.Context, graphqlAssertService sgraphql.GraphQLAssertService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, assert := range bundle.GraphQLAsserts { + // Generate new ID if not preserving + if !opts.PreserveIDs { + assert.ID = idwrap.NewNow() + } + + // Create assert + if err := graphqlAssertService.Create(ctx, &assert); err != nil { + return fmt.Errorf("failed to create GraphQL assert: %w", err) + } + + result.GraphQLAssertsCreated++ + } + return nil +} + +// importFlowGraphQLNodes imports flow GraphQL nodes from the bundle. 
+func (s *IOWorkspaceService) importFlowGraphQLNodes(ctx context.Context, nodeGraphQLService sflow.NodeGraphQLService, bundle *WorkspaceBundle, opts ImportOptions, result *ImportResult) error { + for _, gqlNode := range bundle.FlowGraphQLNodes { + // Remap flow node ID + if newNodeID, ok := result.NodeIDMap[gqlNode.FlowNodeID]; ok { + gqlNode.FlowNodeID = newNodeID + } + + // Create GraphQL node + if err := nodeGraphQLService.CreateNodeGraphQL(ctx, gqlNode); err != nil { + return fmt.Errorf("failed to create flow GraphQL node: %w", err) + } + + result.FlowGraphQLNodesCreated++ + } + return nil +} diff --git a/packages/server/pkg/ioworkspace/types.go b/packages/server/pkg/ioworkspace/types.go index 39190e661..b8eb6747f 100644 --- a/packages/server/pkg/ioworkspace/types.go +++ b/packages/server/pkg/ioworkspace/types.go @@ -6,6 +6,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/menv" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mfile" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mworkspace" ) @@ -18,7 +19,7 @@ type WorkspaceBundle struct { Workspace mworkspace.Workspace // HTTP requests and associated data structures - HTTPRequests []mhttp.HTTP + HTTPRequests []mhttp.HTTP HTTPSearchParams []mhttp.HTTPSearchParam HTTPHeaders []mhttp.HTTPHeader HTTPBodyForms []mhttp.HTTPBodyForm @@ -26,6 +27,11 @@ type WorkspaceBundle struct { HTTPBodyRaw []mhttp.HTTPBodyRaw HTTPAsserts []mhttp.HTTPAssert + // GraphQL requests and associated data + GraphQLRequests []mgraphql.GraphQL + GraphQLHeaders []mgraphql.GraphQLHeader + GraphQLAsserts []mgraphql.GraphQLAssert + // File organization Files []mfile.File @@ -44,6 +50,7 @@ type WorkspaceBundle struct { FlowAINodes []mflow.NodeAI FlowAIProviderNodes 
[]mflow.NodeAiProvider FlowAIMemoryNodes []mflow.NodeMemory + FlowGraphQLNodes []mflow.NodeGraphQL // Environments and variables Environments []menv.Env @@ -64,6 +71,8 @@ func (wb *WorkspaceBundle) CountEntities() map[string]int { "http_body_urlencoded": len(wb.HTTPBodyUrlencoded), "http_body_raw": len(wb.HTTPBodyRaw), "http_asserts": len(wb.HTTPAsserts), + "graphql_requests": len(wb.GraphQLRequests), + "graphql_headers": len(wb.GraphQLHeaders), "files": len(wb.Files), "flows": len(wb.Flows), "flow_variables": len(wb.FlowVariables), @@ -76,8 +85,9 @@ func (wb *WorkspaceBundle) CountEntities() map[string]int { "flow_js_nodes": len(wb.FlowJSNodes), "flow_ai_nodes": len(wb.FlowAINodes), "flow_ai_provider_nodes": len(wb.FlowAIProviderNodes), - "flow_ai_memory_nodes": len(wb.FlowAIMemoryNodes), - "environments": len(wb.Environments), + "flow_ai_memory_nodes": len(wb.FlowAIMemoryNodes), + "flow_graphql_nodes": len(wb.FlowGraphQLNodes), + "environments": len(wb.Environments), "environment_vars": len(wb.EnvironmentVars), "credentials": len(wb.Credentials), } @@ -94,6 +104,17 @@ func (wb *WorkspaceBundle) GetHTTPByID(id idwrap.IDWrap) *mhttp.HTTP { return nil } +// GetGraphQLByID finds and returns a GraphQL request by its ID. +// Returns nil if the GraphQL request is not found. +func (wb *WorkspaceBundle) GetGraphQLByID(id idwrap.IDWrap) *mgraphql.GraphQL { + for i := range wb.GraphQLRequests { + if wb.GraphQLRequests[i].ID.Compare(id) == 0 { + return &wb.GraphQLRequests[i] + } + } + return nil +} + // GetFlowByID finds and returns a flow by its ID. // Returns nil if the flow is not found. 
func (wb *WorkspaceBundle) GetFlowByID(id idwrap.IDWrap) *mflow.Flow { diff --git a/packages/server/pkg/model/mfile/mfile.go b/packages/server/pkg/model/mfile/mfile.go index 6606de978..5d0920615 100644 --- a/packages/server/pkg/model/mfile/mfile.go +++ b/packages/server/pkg/model/mfile/mfile.go @@ -18,6 +18,7 @@ const ( ContentTypeHTTPDelta ContentType = 2 // http delta (draft/overlay) ContentTypeFlow ContentType = 3 // flow ContentTypeCredential ContentType = 4 // credential + ContentTypeGraphQL ContentType = 5 // graphql ) // String returns the string representation of ContentType @@ -33,6 +34,8 @@ func (ct ContentType) String() string { return "http_delta" case ContentTypeCredential: return "credential" + case ContentTypeGraphQL: + return "graphql" default: return "unknown" } @@ -87,6 +90,11 @@ func (f File) IsCredential() bool { return f.ContentType == ContentTypeCredential } +// IsGraphQL returns true if the file contains a GraphQL request +func (f File) IsGraphQL() bool { + return f.ContentType == ContentTypeGraphQL +} + // IsRoot returns true if the file has no parent folder func (f File) IsRoot() bool { return f.ParentID == nil @@ -130,6 +138,8 @@ func ContentTypeFromString(s string) ContentType { return ContentTypeHTTPDelta case "credential": return ContentTypeCredential + case "graphql": + return ContentTypeGraphQL default: return ContentTypeUnknown } @@ -137,7 +147,7 @@ func ContentTypeFromString(s string) ContentType { // IsValidContentType checks if the content type is valid func IsValidContentType(kind ContentType) bool { - return kind == ContentTypeFolder || kind == ContentTypeFlow || kind == ContentTypeHTTP || kind == ContentTypeHTTPDelta || kind == ContentTypeCredential + return kind == ContentTypeFolder || kind == ContentTypeFlow || kind == ContentTypeHTTP || kind == ContentTypeHTTPDelta || kind == ContentTypeCredential || kind == ContentTypeGraphQL } // IDEquals checks if two IDWrap values are equal diff --git 
a/packages/server/pkg/model/mflow/execution.go b/packages/server/pkg/model/mflow/execution.go index ac68768fc..0c52d567b 100644 --- a/packages/server/pkg/model/mflow/execution.go +++ b/packages/server/pkg/model/mflow/execution.go @@ -18,6 +18,7 @@ type NodeExecution struct { OutputData []byte `json:"output_data,omitempty"` OutputDataCompressType int8 `json:"output_data_compress_type"` ResponseID *idwrap.IDWrap `json:"response_id,omitempty"` + GraphQLResponseID *idwrap.IDWrap `json:"graphql_response_id,omitempty"` CompletedAt *int64 `json:"completed_at,omitempty"` } diff --git a/packages/server/pkg/model/mflow/node.go b/packages/server/pkg/model/mflow/node.go index b964bd8c1..8218e74ae 100644 --- a/packages/server/pkg/model/mflow/node.go +++ b/packages/server/pkg/model/mflow/node.go @@ -18,6 +18,7 @@ const ( NODE_KIND_AI NodeKind = 7 NODE_KIND_AI_PROVIDER NodeKind = 8 NODE_KIND_AI_MEMORY NodeKind = 9 + NODE_KIND_GRAPHQL NodeKind = 10 ) type NodeState = int8 diff --git a/packages/server/pkg/model/mflow/node_types.go b/packages/server/pkg/model/mflow/node_types.go index c5e3ac740..747306b76 100644 --- a/packages/server/pkg/model/mflow/node_types.go +++ b/packages/server/pkg/model/mflow/node_types.go @@ -252,3 +252,11 @@ type NodeMemory struct { MemoryType AiMemoryType WindowSize int32 } + +// --- GraphQL Node --- + +type NodeGraphQL struct { + FlowNodeID idwrap.IDWrap + GraphQLID *idwrap.IDWrap + DeltaGraphQLID *idwrap.IDWrap +} diff --git a/packages/server/pkg/model/mgraphql/mgraphql.go b/packages/server/pkg/model/mgraphql/mgraphql.go new file mode 100644 index 000000000..400cea565 --- /dev/null +++ b/packages/server/pkg/model/mgraphql/mgraphql.go @@ -0,0 +1,104 @@ +package mgraphql + +import ( + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +type GraphQL struct { + ID idwrap.IDWrap `json:"id"` + WorkspaceID idwrap.IDWrap `json:"workspace_id"` + FolderID *idwrap.IDWrap `json:"folder_id,omitempty"` + Name string `json:"name"` + Url string 
`json:"url"` + Query string `json:"query"` + Variables string `json:"variables"` + Description string `json:"description"` + ParentGraphQLID *idwrap.IDWrap `json:"parent_graphql_id,omitempty"` + IsDelta bool `json:"is_delta"` + IsSnapshot bool `json:"is_snapshot"` + DeltaName *string `json:"delta_name,omitempty"` + DeltaUrl *string `json:"delta_url,omitempty"` + DeltaQuery *string `json:"delta_query,omitempty"` + DeltaVariables *string `json:"delta_variables,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + LastRunAt *int64 `json:"last_run_at,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +type GraphQLHeader struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Key string `json:"key"` + Value string `json:"value"` + Enabled bool `json:"enabled"` + Description string `json:"description"` + DisplayOrder float32 `json:"order"` + ParentGraphQLHeaderID *idwrap.IDWrap `json:"parent_graphql_header_id,omitempty"` + IsDelta bool `json:"is_delta"` + DeltaKey *string `json:"delta_key,omitempty"` + DeltaValue *string `json:"delta_value,omitempty"` + DeltaEnabled *bool `json:"delta_enabled,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + DeltaDisplayOrder *float32 `json:"delta_order,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +type GraphQLAssert struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Value string `json:"value"` + Enabled bool `json:"enabled"` + Description string `json:"description"` + DisplayOrder float32 `json:"order"` + ParentGraphQLAssertID *idwrap.IDWrap `json:"parent_graphql_assert_id,omitempty"` + IsDelta bool `json:"is_delta"` + DeltaValue *string `json:"delta_value,omitempty"` + DeltaEnabled *bool `json:"delta_enabled,omitempty"` + DeltaDescription *string `json:"delta_description,omitempty"` + DeltaDisplayOrder *float32 
`json:"delta_order,omitempty"` + CreatedAt int64 `json:"created_at"` + UpdatedAt int64 `json:"updated_at"` +} + +func (a GraphQLAssert) IsEnabled() bool { + return a.Enabled +} + +type GraphQLResponse struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + Status int32 `json:"status"` + Body []byte `json:"body"` + Time int64 `json:"time"` + Duration int32 `json:"duration"` + Size int32 `json:"size"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLResponseHeader struct { + ID idwrap.IDWrap `json:"id"` + ResponseID idwrap.IDWrap `json:"response_id"` + HeaderKey string `json:"header_key"` + HeaderValue string `json:"header_value"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLResponseAssert struct { + ID idwrap.IDWrap `json:"id"` + ResponseID idwrap.IDWrap `json:"response_id"` + Value string `json:"value"` + Success bool `json:"success"` + CreatedAt int64 `json:"created_at"` +} + +type GraphQLVersion struct { + ID idwrap.IDWrap `json:"id"` + GraphQLID idwrap.IDWrap `json:"graphql_id"` + VersionName string `json:"version_name"` + VersionDescription string `json:"version_description"` + IsActive bool `json:"is_active"` + CreatedAt int64 `json:"created_at"` + CreatedBy *idwrap.IDWrap `json:"created_by,omitempty"` +} diff --git a/packages/server/pkg/mutation/delete_file.go b/packages/server/pkg/mutation/delete_file.go index e6a93351a..7d5c7819b 100644 --- a/packages/server/pkg/mutation/delete_file.go +++ b/packages/server/pkg/mutation/delete_file.go @@ -51,6 +51,14 @@ func (c *Context) DeleteFile(ctx context.Context, file FileDeleteItem) error { if err := c.q.DeleteCredential(ctx, *file.ContentID); err != nil { return err } + case mfile.ContentTypeGraphQL: + // GraphQL - cascade to headers + if err := c.DeleteGraphQL(ctx, GraphQLDeleteItem{ + ID: *file.ContentID, + WorkspaceID: file.WorkspaceID, + }); err != nil { + return err + } case mfile.ContentTypeFolder: // Content deletion handled by recursion above (folders don't 
have separate content tables) } @@ -87,6 +95,7 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e // Group by content type for efficient batch deletion of LEAF content var httpItems []HTTPDeleteItem var flowItems []FlowDeleteItem + var graphqlItems []GraphQLDeleteItem for _, item := range items { if item.ContentID != nil { @@ -114,6 +123,11 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e if err := c.q.DeleteCredential(ctx, *item.ContentID); err != nil { return err } + case mfile.ContentTypeGraphQL: + graphqlItems = append(graphqlItems, GraphQLDeleteItem{ + ID: *item.ContentID, + WorkspaceID: item.WorkspaceID, + }) } } } @@ -132,6 +146,13 @@ func (c *Context) DeleteFileBatch(ctx context.Context, items []FileDeleteItem) e } } + // Delete GraphQL content batch + if len(graphqlItems) > 0 { + if err := c.DeleteGraphQLBatch(ctx, graphqlItems); err != nil { + return err + } + } + // Track file deletes and delete file records for _, item := range items { c.track(Event{ diff --git a/packages/server/pkg/mutation/delete_graphql.go b/packages/server/pkg/mutation/delete_graphql.go new file mode 100644 index 000000000..5641cdae0 --- /dev/null +++ b/packages/server/pkg/mutation/delete_graphql.go @@ -0,0 +1,77 @@ +package mutation + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" +) + +// GraphQLDeleteItem represents a GraphQL entry to delete. +type GraphQLDeleteItem struct { + ID idwrap.IDWrap + WorkspaceID idwrap.IDWrap +} + +// DeleteGraphQL deletes a GraphQL entry and tracks cascade events. 
+func (c *Context) DeleteGraphQL(ctx context.Context, item GraphQLDeleteItem) error { + // Collect children before delete + c.collectGraphQLChildren(ctx, item.ID, item.WorkspaceID) + + // Track parent delete + c.track(Event{ + Entity: EntityGraphQL, + Op: OpDelete, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + }) + + // Delete - DB CASCADE handles actual child deletion + err := c.q.DeleteGraphQL(ctx, item.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + return nil +} + +// DeleteGraphQLBatch deletes multiple GraphQL entries. +func (c *Context) DeleteGraphQLBatch(ctx context.Context, items []GraphQLDeleteItem) error { + for _, item := range items { + if err := c.DeleteGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// collectGraphQLChildren collects cascade events for a single GraphQL entry. +func (c *Context) collectGraphQLChildren(ctx context.Context, graphqlID, workspaceID idwrap.IDWrap) { + // Headers - cascaded by DB FK + if headers, err := c.q.GetGraphQLHeaders(ctx, graphqlID); err == nil { + for i := range headers { + c.track(Event{ + Entity: EntityGraphQLHeader, + Op: OpDelete, + ID: headers[i].ID, + ParentID: graphqlID, + WorkspaceID: workspaceID, + }) + } + } + + // Asserts - cascaded by DB FK + if asserts, err := c.q.GetGraphQLAssertsByGraphQLID(ctx, graphqlID.Bytes()); err == nil { + for i := range asserts { + id, _ := idwrap.NewFromBytes(asserts[i].ID) + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpDelete, + ID: id, + ParentID: graphqlID, + WorkspaceID: workspaceID, + IsDelta: asserts[i].IsDelta, + }) + } + } +} diff --git a/packages/server/pkg/mutation/event.go b/packages/server/pkg/mutation/event.go index 8c791db60..a89acfe1c 100644 --- a/packages/server/pkg/mutation/event.go +++ b/packages/server/pkg/mutation/event.go @@ -38,6 +38,7 @@ const ( EntityFlowNodeAI EntityFlowNodeAiProvider EntityFlowNodeMemory + EntityFlowNodeGraphQL EntityFlowEdge EntityFlowVariable EntityFlowTag @@ -47,6 
+48,14 @@ const ( // Credential entities EntityCredential + + // GraphQL entities + EntityGraphQL + EntityGraphQLHeader + EntityGraphQLAssert + EntityGraphQLResponse + EntityGraphQLResponseHeader + EntityGraphQLResponseAssert ) // Operation identifies the type of mutation. diff --git a/packages/server/pkg/mutation/insert_graphql.go b/packages/server/pkg/mutation/insert_graphql.go new file mode 100644 index 000000000..ae6cb3a1c --- /dev/null +++ b/packages/server/pkg/mutation/insert_graphql.go @@ -0,0 +1,80 @@ +package mutation + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLInsertItem represents a GraphQL entry to insert. +type GraphQLInsertItem struct { + GraphQL *mgraphql.GraphQL + WorkspaceID idwrap.IDWrap +} + +// InsertGraphQL inserts a GraphQL entry and tracks the event. +func (c *Context) InsertGraphQL(ctx context.Context, item GraphQLInsertItem) error { + writer := sgraphql.NewWriterFromQueries(c.q) + + if err := writer.Create(ctx, item.GraphQL); err != nil { + return err + } + + c.track(Event{ + Entity: EntityGraphQL, + Op: OpInsert, + ID: item.GraphQL.ID, + WorkspaceID: item.WorkspaceID, + Payload: item.GraphQL, + }) + + return nil +} + +// InsertGraphQLBatch inserts multiple GraphQL entries. +func (c *Context) InsertGraphQLBatch(ctx context.Context, items []GraphQLInsertItem) error { + for _, item := range items { + if err := c.InsertGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertInsertItem represents a GraphQL assert to insert. 
+type GraphQLAssertInsertItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + Params gen.CreateGraphQLAssertParams +} + +// InsertGraphQLAssert inserts a GraphQL assert and tracks the event. +func (c *Context) InsertGraphQLAssert(ctx context.Context, item GraphQLAssertInsertItem) error { + if err := c.q.CreateGraphQLAssert(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpInsert, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + ParentID: item.GraphQLID, + IsDelta: item.IsDelta, + }) + return nil +} + +// InsertGraphQLAssertBatch inserts multiple GraphQL asserts. +func (c *Context) InsertGraphQLAssertBatch(ctx context.Context, items []GraphQLAssertInsertItem) error { + for _, item := range items { + if err := c.InsertGraphQLAssert(ctx, item); err != nil { + return err + } + } + return nil +} diff --git a/packages/server/pkg/mutation/update_graphql.go b/packages/server/pkg/mutation/update_graphql.go new file mode 100644 index 000000000..73aaf90c4 --- /dev/null +++ b/packages/server/pkg/mutation/update_graphql.go @@ -0,0 +1,120 @@ +package mutation + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/patch" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/service/sgraphql" +) + +// GraphQLUpdateItem represents a GraphQL entry to update. +type GraphQLUpdateItem struct { + GraphQL *mgraphql.GraphQL + WorkspaceID idwrap.IDWrap +} + +// UpdateGraphQL updates a GraphQL entry and tracks the event. 
+func (c *Context) UpdateGraphQL(ctx context.Context, item GraphQLUpdateItem) error { + writer := sgraphql.NewWriterFromQueries(c.q) + + if err := writer.Update(ctx, item.GraphQL); err != nil { + return err + } + + c.track(Event{ + Entity: EntityGraphQL, + Op: OpUpdate, + ID: item.GraphQL.ID, + WorkspaceID: item.WorkspaceID, + Payload: item.GraphQL, + }) + + return nil +} + +// UpdateGraphQLBatch updates multiple GraphQL entries. +func (c *Context) UpdateGraphQLBatch(ctx context.Context, items []GraphQLUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQL(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertUpdateItem represents a GraphQL assert to update. +type GraphQLAssertUpdateItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + IsDelta bool + Params gen.UpdateGraphQLAssertParams + Patch patch.GraphQLAssertPatch +} + +// UpdateGraphQLAssert updates a GraphQL assert and tracks the event. +func (c *Context) UpdateGraphQLAssert(ctx context.Context, item GraphQLAssertUpdateItem) error { + if err := c.q.UpdateGraphQLAssert(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpUpdate, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + ParentID: item.GraphQLID, + IsDelta: item.IsDelta, + Patch: item.Patch, + }) + return nil +} + +// UpdateGraphQLAssertBatch updates multiple GraphQL asserts. +func (c *Context) UpdateGraphQLAssertBatch(ctx context.Context, items []GraphQLAssertUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQLAssert(ctx, item); err != nil { + return err + } + } + return nil +} + +// GraphQLAssertDeltaUpdateItem represents a GraphQL assert delta to update. 
+type GraphQLAssertDeltaUpdateItem struct { + ID idwrap.IDWrap + GraphQLID idwrap.IDWrap + WorkspaceID idwrap.IDWrap + Params gen.UpdateGraphQLAssertDeltaParams + Patch any + Payload any +} + +// UpdateGraphQLAssertDelta updates a GraphQL assert delta and tracks the event. +func (c *Context) UpdateGraphQLAssertDelta(ctx context.Context, item GraphQLAssertDeltaUpdateItem) error { + if err := c.q.UpdateGraphQLAssertDelta(ctx, item.Params); err != nil { + return err + } + c.track(Event{ + Entity: EntityGraphQLAssert, + Op: OpUpdate, + ID: item.ID, + WorkspaceID: item.WorkspaceID, + IsDelta: true, + Patch: item.Patch, + Payload: item.Payload, + }) + return nil +} + +// UpdateGraphQLAssertDeltaBatch updates multiple GraphQL assert deltas. +func (c *Context) UpdateGraphQLAssertDeltaBatch(ctx context.Context, items []GraphQLAssertDeltaUpdateItem) error { + for _, item := range items { + if err := c.UpdateGraphQLAssertDelta(ctx, item); err != nil { + return err + } + } + return nil +} diff --git a/packages/server/pkg/patch/patch.go b/packages/server/pkg/patch/patch.go index 5b9c42f6f..ca01a17eb 100644 --- a/packages/server/pkg/patch/patch.go +++ b/packages/server/pkg/patch/patch.go @@ -110,6 +110,36 @@ func (p HTTPDeltaPatch) HasChanges() bool { return p.Name.IsSet() || p.Method.IsSet() || p.Url.IsSet() } +// GraphQLDeltaPatch represents sparse updates to GraphQL delta fields. 
+//
+// Semantics:
+//   - Field.IsSet() == false: field not changed (omitted from update)
+//   - Field.IsUnset() == true: field explicitly UNSET/cleared
+//   - Field.HasValue() == true: field set to that value
+type GraphQLDeltaPatch struct {
+	Name      Optional[string]
+	URL       Optional[string]
+	Query     Optional[string]
+	Variables Optional[string]
+}
+
+// HasChanges returns true if any field in the patch has been set.
+func (p GraphQLDeltaPatch) HasChanges() bool {
+	return p.Name.IsSet() || p.URL.IsSet() || p.Query.IsSet() || p.Variables.IsSet()
+}
+
+// GraphQLAssertPatch represents sparse updates to GraphQL assert delta fields.
+type GraphQLAssertPatch struct {
+	Value   Optional[string]
+	Enabled Optional[bool]
+	Order   Optional[float32]
+}
+
+// HasChanges returns true if any field in the patch has been set.
+func (p GraphQLAssertPatch) HasChanges() bool {
+	return p.Value.IsSet() || p.Enabled.IsSet() || p.Order.IsSet()
+}
+
 // EdgePatch represents partial updates to an Edge
 type EdgePatch struct {
 	SourceID Optional[string] // ID stored as base64 string for JSON compatibility
diff --git a/packages/server/pkg/service/sflow/node_execution_mapper.go b/packages/server/pkg/service/sflow/node_execution_mapper.go
index 81beb09ab..0449b86fe 100644
--- a/packages/server/pkg/service/sflow/node_execution_mapper.go
+++ b/packages/server/pkg/service/sflow/node_execution_mapper.go
@@ -34,6 +34,7 @@ func ConvertNodeExecutionToDB(ne mflow.NodeExecution) *gen.NodeExecution {
 		OutputDataCompressType: ne.OutputDataCompressType,
 		Error:                  errorSQL,
 		HttpResponseID:         ne.ResponseID,
+		GraphqlResponseID:      ne.GraphQLResponseID,
 		CompletedAt:            completedAtSQL,
 	}
 }
@@ -62,6 +63,7 @@ func ConvertNodeExecutionToModel(ne gen.NodeExecution) *mflow.NodeExecution {
 		OutputDataCompressType: ne.OutputDataCompressType,
 		Error:                  errorPtr,
 		ResponseID:             responseIDPtr,
+		GraphQLResponseID:      ne.GraphqlResponseID,
 		CompletedAt:            completedAtPtr,
 	}
 }
diff --git a/packages/server/pkg/service/sflow/node_execution_writer.go 
b/packages/server/pkg/service/sflow/node_execution_writer.go index 80d26759c..6b59dfdba 100644 --- a/packages/server/pkg/service/sflow/node_execution_writer.go +++ b/packages/server/pkg/service/sflow/node_execution_writer.go @@ -48,6 +48,7 @@ func (w *NodeExecutionWriter) CreateNodeExecution(ctx context.Context, ne mflow. OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) @@ -78,6 +79,7 @@ func (w *NodeExecutionWriter) UpdateNodeExecution(ctx context.Context, ne mflow. OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) @@ -112,6 +114,7 @@ func (w *NodeExecutionWriter) UpsertNodeExecution(ctx context.Context, ne mflow. OutputData: ne.OutputData, OutputDataCompressType: ne.OutputDataCompressType, HttpResponseID: ne.ResponseID, + GraphqlResponseID: ne.GraphQLResponseID, CompletedAt: completedAtSQL, }) diff --git a/packages/server/pkg/service/sflow/node_graphql.go b/packages/server/pkg/service/sflow/node_graphql.go new file mode 100644 index 000000000..843fdc6e5 --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql.go @@ -0,0 +1,60 @@ +//nolint:revive // exported +package sflow + +import ( + "context" + "database/sql" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLService struct { + reader *NodeGraphQLReader + queries *gen.Queries +} + +func NewNodeGraphQLService(queries *gen.Queries) NodeGraphQLService { + return NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(queries), + queries: queries, + } +} + +func (ngs NodeGraphQLService) TX(tx *sql.Tx) NodeGraphQLService { + newQueries := ngs.queries.WithTx(tx) + 
return NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(newQueries), + queries: newQueries, + } +} + +func NewNodeGraphQLServiceTX(ctx context.Context, tx *sql.Tx) (*NodeGraphQLService, error) { + queries, err := gen.Prepare(ctx, tx) + if err != nil { + return nil, err + } + return &NodeGraphQLService{ + reader: NewNodeGraphQLReaderFromQueries(queries), + queries: queries, + }, nil +} + +func (ngs NodeGraphQLService) GetNodeGraphQL(ctx context.Context, id idwrap.IDWrap) (*mflow.NodeGraphQL, error) { + return ngs.reader.GetNodeGraphQL(ctx, id) +} + +func (ngs NodeGraphQLService) CreateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).CreateNodeGraphQL(ctx, ng) +} + +func (ngs NodeGraphQLService) UpdateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).UpdateNodeGraphQL(ctx, ng) +} + +func (ngs NodeGraphQLService) DeleteNodeGraphQL(ctx context.Context, id idwrap.IDWrap) error { + return NewNodeGraphQLWriterFromQueries(ngs.queries).DeleteNodeGraphQL(ctx, id) +} + +func (ngs NodeGraphQLService) Reader() *NodeGraphQLReader { return ngs.reader } diff --git a/packages/server/pkg/service/sflow/node_graphql_mapper.go b/packages/server/pkg/service/sflow/node_graphql_mapper.go new file mode 100644 index 000000000..114174aee --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_mapper.go @@ -0,0 +1,41 @@ +package sflow + +import ( + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +func ConvertToDBNodeGraphQL(ng mflow.NodeGraphQL) (gen.FlowNodeGraphql, bool) { + if ng.GraphQLID == nil || isZeroID(*ng.GraphQLID) { + return gen.FlowNodeGraphql{}, false + } + + dbNode := gen.FlowNodeGraphql{ + FlowNodeID: ng.FlowNodeID, + GraphqlID: *ng.GraphQLID, + } + + if 
ng.DeltaGraphQLID != nil { + dbNode.DeltaGraphqlID = ng.DeltaGraphQLID.Bytes() + } + + return dbNode, true +} + +func ConvertToModelNodeGraphQL(ng gen.FlowNodeGraphql) *mflow.NodeGraphQL { + graphqlID := ng.GraphqlID + modelNode := &mflow.NodeGraphQL{ + FlowNodeID: ng.FlowNodeID, + GraphQLID: &graphqlID, + } + + if len(ng.DeltaGraphqlID) > 0 { + deltaID, err := idwrap.NewFromBytes(ng.DeltaGraphqlID) + if err == nil { + modelNode.DeltaGraphQLID = &deltaID + } + } + + return modelNode +} diff --git a/packages/server/pkg/service/sflow/node_graphql_reader.go b/packages/server/pkg/service/sflow/node_graphql_reader.go new file mode 100644 index 000000000..a299682bd --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_reader.go @@ -0,0 +1,34 @@ +package sflow + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLReader struct { + queries *gen.Queries +} + +func NewNodeGraphQLReader(db *sql.DB) *NodeGraphQLReader { + return &NodeGraphQLReader{queries: gen.New(db)} +} + +func NewNodeGraphQLReaderFromQueries(queries *gen.Queries) *NodeGraphQLReader { + return &NodeGraphQLReader{queries: queries} +} + +func (r *NodeGraphQLReader) GetNodeGraphQL(ctx context.Context, id idwrap.IDWrap) (*mflow.NodeGraphQL, error) { + nodeGQL, err := r.queries.GetFlowNodeGraphQL(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + return nil, err + } + return ConvertToModelNodeGraphQL(nodeGQL), nil +} diff --git a/packages/server/pkg/service/sflow/node_graphql_writer.go b/packages/server/pkg/service/sflow/node_graphql_writer.go new file mode 100644 index 000000000..859beff0a --- /dev/null +++ b/packages/server/pkg/service/sflow/node_graphql_writer.go @@ -0,0 +1,47 @@ +package sflow + +import ( + "context" + "database/sql" + 
"errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" +) + +type NodeGraphQLWriter struct { + queries *gen.Queries +} + +func NewNodeGraphQLWriter(tx gen.DBTX) *NodeGraphQLWriter { + return &NodeGraphQLWriter{queries: gen.New(tx)} +} + +func NewNodeGraphQLWriterFromQueries(queries *gen.Queries) *NodeGraphQLWriter { + return &NodeGraphQLWriter{queries: queries} +} + +func (w *NodeGraphQLWriter) CreateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + dbModel, ok := ConvertToDBNodeGraphQL(ng) + if !ok { + return nil + } + return w.queries.CreateFlowNodeGraphQL(ctx, gen.CreateFlowNodeGraphQLParams(dbModel)) +} + +func (w *NodeGraphQLWriter) UpdateNodeGraphQL(ctx context.Context, ng mflow.NodeGraphQL) error { + dbModel, ok := ConvertToDBNodeGraphQL(ng) + if !ok { + // Treat removal of GraphQLID as request to delete any existing binding. 
+ if err := w.queries.DeleteFlowNodeGraphQL(ctx, ng.FlowNodeID); err != nil && !errors.Is(err, sql.ErrNoRows) { + return err + } + return nil + } + return w.queries.UpdateFlowNodeGraphQL(ctx, gen.UpdateFlowNodeGraphQLParams(dbModel)) +} + +func (w *NodeGraphQLWriter) DeleteNodeGraphQL(ctx context.Context, id idwrap.IDWrap) error { + return w.queries.DeleteFlowNodeGraphQL(ctx, id) +} diff --git a/packages/server/pkg/service/sgraphql/assert.go b/packages/server/pkg/service/sgraphql/assert.go new file mode 100644 index 000000000..1f526415f --- /dev/null +++ b/packages/server/pkg/service/sgraphql/assert.go @@ -0,0 +1,295 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLAssertFound = errors.New("no graphql assert found") + +type GraphQLAssertService struct { + queries *gen.Queries +} + +func NewGraphQLAssertService(queries *gen.Queries) GraphQLAssertService { + return GraphQLAssertService{queries: queries} +} + +func (s GraphQLAssertService) TX(tx *sql.Tx) GraphQLAssertService { + return GraphQLAssertService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLAssertService) GetByID(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQLAssert, error) { + assert, err := s.queries.GetGraphQLAssert(ctx, id.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, ErrNoGraphQLAssertFound + } + return nil, err + } + + result := convertGetGraphQLAssertRowToModel(assert) + return &result, nil +} + +func (s GraphQLAssertService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertsByGraphQLID(ctx, graphqlID.Bytes()) + if err != nil { + if errors.Is(err, 
sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertsByGraphQLIDRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) GetByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + // Convert IDWraps to [][]byte + idBytes := make([][]byte, len(ids)) + for i, id := range ids { + idBytes[i] = id.Bytes() + } + + asserts, err := s.queries.GetGraphQLAssertsByIDs(ctx, idBytes) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertsByIDsRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) Create(ctx context.Context, assert *mgraphql.GraphQLAssert) error { + now := dbtime.DBNow() + assert.CreatedAt = now.Unix() + assert.UpdatedAt = now.Unix() + + params := gen.CreateGraphQLAssertParams{ + ID: assert.ID.Bytes(), + GraphqlID: assert.GraphQLID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + CreatedAt: assert.CreatedAt, + UpdatedAt: assert.UpdatedAt, + } + + // Handle delta fields + if assert.ParentGraphQLAssertID != nil { + params.ParentGraphqlAssertID = assert.ParentGraphQLAssertID.Bytes() + } + params.IsDelta = assert.IsDelta + params.DeltaValue = stringPtrToNullString(assert.DeltaValue) + params.DeltaEnabled = boolPtrToNullBool(assert.DeltaEnabled) + params.DeltaDescription = stringPtrToNullString(assert.DeltaDescription) + params.DeltaDisplayOrder = float32PtrToNullFloat64(assert.DeltaDisplayOrder) + + return s.queries.CreateGraphQLAssert(ctx, params) +} + +func (s GraphQLAssertService) Update(ctx context.Context, assert *mgraphql.GraphQLAssert) error { + return s.queries.UpdateGraphQLAssert(ctx, gen.UpdateGraphQLAssertParams{ + ID: 
assert.ID.Bytes(), + Value: assert.Value, + Enabled: assert.Enabled, + Description: assert.Description, + DisplayOrder: float64(assert.DisplayOrder), + UpdatedAt: dbtime.DBNow().Unix(), + }) +} + +func (s GraphQLAssertService) UpdateDelta(ctx context.Context, id idwrap.IDWrap, deltaValue *string, deltaEnabled *bool, deltaDescription *string, deltaDisplayOrder *float32) error { + return s.queries.UpdateGraphQLAssertDelta(ctx, gen.UpdateGraphQLAssertDeltaParams{ + ID: id.Bytes(), + DeltaValue: stringPtrToNullString(deltaValue), + DeltaEnabled: boolPtrToNullBool(deltaEnabled), + DeltaDescription: stringPtrToNullString(deltaDescription), + DeltaDisplayOrder: float32PtrToNullFloat64(deltaDisplayOrder), + UpdatedAt: dbtime.DBNow().Unix(), + }) +} + +func (s GraphQLAssertService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return s.queries.DeleteGraphQLAssert(ctx, id.Bytes()) +} + +// Delta methods +func (s GraphQLAssertService) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertDeltasByWorkspaceIDRowToModel(a) + } + return result, nil +} + +func (s GraphQLAssertService) GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQLAssert, error) { + asserts, err := s.queries.GetGraphQLAssertDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLAssert, len(asserts)) + for i, a := range asserts { + result[i] = convertGetGraphQLAssertDeltasByParentIDRowToModel(a) + } + return result, nil +} + +// Row conversion helpers 
- convert sqlc row types to model +func convertGetGraphQLAssertRowToModel(row gen.GetGraphQLAssertRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertsByGraphQLIDRowToModel(row gen.GetGraphQLAssertsByGraphQLIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertsByIDsRowToModel(row gen.GetGraphQLAssertsByIDsRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + 
ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertDeltasByWorkspaceIDRowToModel(row gen.GetGraphQLAssertDeltasByWorkspaceIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } +} + +func convertGetGraphQLAssertDeltasByParentIDRowToModel(row gen.GetGraphQLAssertDeltasByParentIDRow) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(row.ID) + graphqlID, _ := idwrap.NewFromBytes(row.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: row.Value, + Enabled: row.Enabled, + Description: row.Description, + DisplayOrder: float32(row.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(row.ParentGraphqlAssertID), + IsDelta: row.IsDelta, + DeltaValue: interfaceToStringPtr(row.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(row.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(row.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(row.DeltaDisplayOrder), + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + } 
+} + +// Conversion helpers +func stringPtrToNullString(s *string) sql.NullString { + if s == nil { + return sql.NullString{Valid: false} + } + return sql.NullString{String: *s, Valid: true} +} + +func boolPtrToNullBool(b *bool) sql.NullBool { + if b == nil { + return sql.NullBool{Valid: false} + } + return sql.NullBool{Bool: *b, Valid: true} +} + +func float32PtrToNullFloat64(f *float32) sql.NullFloat64 { + if f == nil { + return sql.NullFloat64{Valid: false} + } + return sql.NullFloat64{Float64: float64(*f), Valid: true} +} diff --git a/packages/server/pkg/service/sgraphql/header.go b/packages/server/pkg/service/sgraphql/header.go new file mode 100644 index 000000000..04798fc78 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/header.go @@ -0,0 +1,121 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLHeaderFound = errors.New("no graphql header found") + +type GraphQLHeaderService struct { + queries *gen.Queries +} + +func NewGraphQLHeaderService(queries *gen.Queries) GraphQLHeaderService { + return GraphQLHeaderService{queries: queries} +} + +func (s GraphQLHeaderService) TX(tx *sql.Tx) GraphQLHeaderService { + return GraphQLHeaderService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLHeaderService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaders(ctx, graphqlID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + 
+func (s GraphQLHeaderService) GetByIDs(ctx context.Context, ids []idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeadersByIDs(ctx, ids) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + +func (s GraphQLHeaderService) Create(ctx context.Context, header *mgraphql.GraphQLHeader) error { + now := dbtime.DBNow() + header.CreatedAt = now.Unix() + header.UpdatedAt = now.Unix() + + return s.queries.CreateGraphQLHeader(ctx, gen.CreateGraphQLHeaderParams{ + ID: header.ID, + GraphqlID: header.GraphQLID, + HeaderKey: header.Key, + HeaderValue: header.Value, + Description: header.Description, + Enabled: header.Enabled, + DisplayOrder: float64(header.DisplayOrder), + CreatedAt: header.CreatedAt, + UpdatedAt: header.UpdatedAt, + }) +} + +func (s GraphQLHeaderService) Update(ctx context.Context, header *mgraphql.GraphQLHeader) error { + return s.queries.UpdateGraphQLHeader(ctx, gen.UpdateGraphQLHeaderParams{ + ID: header.ID, + HeaderKey: header.Key, + HeaderValue: header.Value, + Description: header.Description, + Enabled: header.Enabled, + DisplayOrder: float64(header.DisplayOrder), + }) +} + +func (s GraphQLHeaderService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return s.queries.DeleteGraphQLHeader(ctx, id) +} + +// Delta methods +func (s GraphQLHeaderService) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaderDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} + +func (s GraphQLHeaderService) 
GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQLHeader, error) { + headers, err := s.queries.GetGraphQLHeaderDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLHeader, len(headers)) + for i, h := range headers { + result[i] = ConvertToModelGraphQLHeader(h) + } + return result, nil +} diff --git a/packages/server/pkg/service/sgraphql/mapper.go b/packages/server/pkg/service/sgraphql/mapper.go new file mode 100644 index 000000000..7ab64b2cb --- /dev/null +++ b/packages/server/pkg/service/sgraphql/mapper.go @@ -0,0 +1,217 @@ +package sgraphql + +import ( + "time" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +func interfaceToInt64Ptr(v interface{}) *int64 { + if v == nil { + return nil + } + switch val := v.(type) { + case int64: + return &val + case int: + i := int64(val) + return &i + default: + return nil + } +} + +func interfaceToInt32(v interface{}) int32 { + switch val := v.(type) { + case int32: + return val + case int64: + return int32(val) //nolint:gosec // G115 + default: + return 0 + } +} + +func interfaceToStringPtr(v interface{}) *string { + if v == nil { + return nil + } + if str, ok := v.(string); ok { + return &str + } + return nil +} + +func interfaceToBoolPtr(v interface{}) *bool { + if v == nil { + return nil + } + if b, ok := v.(bool); ok { + return &b + } + return nil +} + +func interfaceToFloat32Ptr(v interface{}) *float32 { + if v == nil { + return nil + } + switch val := v.(type) { + case float32: + return &val + case float64: + f32 := float32(val) + return &f32 + default: + return nil + } +} + +func bytesToIDWrapPtr(b []byte) *idwrap.IDWrap { + if len(b) == 0 { + return nil + } + id, err := 
idwrap.NewFromBytes(b) + if err != nil { + return nil + } + return &id +} + +func idWrapPtrToBytes(id *idwrap.IDWrap) []byte { + if id == nil { + return nil + } + return id.Bytes() +} + +func stringPtrToInterface(s *string) interface{} { + if s == nil { + return nil + } + return *s +} + +func ConvertToDBGraphQL(gql mgraphql.GraphQL) gen.Graphql { + var lastRunAt interface{} + if gql.LastRunAt != nil { + lastRunAt = *gql.LastRunAt + } + + return gen.Graphql{ + ID: gql.ID, + WorkspaceID: gql.WorkspaceID, + FolderID: gql.FolderID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + ParentGraphqlID: idWrapPtrToBytes(gql.ParentGraphQLID), + IsDelta: gql.IsDelta, + IsSnapshot: gql.IsSnapshot, + DeltaName: stringPtrToInterface(gql.DeltaName), + DeltaUrl: stringPtrToInterface(gql.DeltaUrl), + DeltaQuery: stringPtrToInterface(gql.DeltaQuery), + DeltaVariables: stringPtrToInterface(gql.DeltaVariables), + DeltaDescription: stringPtrToInterface(gql.DeltaDescription), + LastRunAt: lastRunAt, + CreatedAt: gql.CreatedAt, + UpdatedAt: gql.UpdatedAt, + } +} + +func ConvertToModelGraphQL(gql gen.Graphql) *mgraphql.GraphQL { + return &mgraphql.GraphQL{ + ID: gql.ID, + WorkspaceID: gql.WorkspaceID, + FolderID: gql.FolderID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + ParentGraphQLID: bytesToIDWrapPtr(gql.ParentGraphqlID), + IsDelta: gql.IsDelta, + IsSnapshot: gql.IsSnapshot, + DeltaName: interfaceToStringPtr(gql.DeltaName), + DeltaUrl: interfaceToStringPtr(gql.DeltaUrl), + DeltaQuery: interfaceToStringPtr(gql.DeltaQuery), + DeltaVariables: interfaceToStringPtr(gql.DeltaVariables), + DeltaDescription: interfaceToStringPtr(gql.DeltaDescription), + LastRunAt: interfaceToInt64Ptr(gql.LastRunAt), + CreatedAt: gql.CreatedAt, + UpdatedAt: gql.UpdatedAt, + } +} + +func ConvertToDBGraphQLResponse(resp mgraphql.GraphQLResponse) gen.GraphqlResponse { + return 
gen.GraphqlResponse{ + ID: resp.ID, + GraphqlID: resp.GraphQLID, + Status: resp.Status, + Body: resp.Body, + Time: time.Unix(resp.Time, 0), + Duration: resp.Duration, + Size: resp.Size, + CreatedAt: resp.CreatedAt, + } +} + +func ConvertToModelGraphQLResponse(resp gen.GraphqlResponse) mgraphql.GraphQLResponse { + return mgraphql.GraphQLResponse{ + ID: resp.ID, + GraphQLID: resp.GraphqlID, + Status: interfaceToInt32(resp.Status), + Body: resp.Body, + Time: resp.Time.Unix(), + Duration: interfaceToInt32(resp.Duration), + Size: interfaceToInt32(resp.Size), + CreatedAt: resp.CreatedAt, + } +} + +func ConvertToModelGraphQLHeader(h gen.GraphqlHeader) mgraphql.GraphQLHeader { + return mgraphql.GraphQLHeader{ + ID: h.ID, + GraphQLID: h.GraphqlID, + Key: h.HeaderKey, + Value: h.HeaderValue, + Enabled: h.Enabled, + Description: h.Description, + DisplayOrder: float32(h.DisplayOrder), + ParentGraphQLHeaderID: bytesToIDWrapPtr(h.ParentGraphqlHeaderID), + IsDelta: h.IsDelta, + DeltaKey: interfaceToStringPtr(h.DeltaHeaderKey), + DeltaValue: interfaceToStringPtr(h.DeltaHeaderValue), + DeltaEnabled: interfaceToBoolPtr(h.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(h.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(h.DeltaDisplayOrder), + CreatedAt: h.CreatedAt, + UpdatedAt: h.UpdatedAt, + } +} + +func ConvertToModelGraphQLAssert(a gen.GraphqlAssert) mgraphql.GraphQLAssert { + id, _ := idwrap.NewFromBytes(a.ID) + graphqlID, _ := idwrap.NewFromBytes(a.GraphqlID) + + return mgraphql.GraphQLAssert{ + ID: id, + GraphQLID: graphqlID, + Value: a.Value, + Enabled: a.Enabled, + Description: a.Description, + DisplayOrder: float32(a.DisplayOrder), + ParentGraphQLAssertID: bytesToIDWrapPtr(a.ParentGraphqlAssertID), + IsDelta: a.IsDelta, + DeltaValue: interfaceToStringPtr(a.DeltaValue), + DeltaEnabled: interfaceToBoolPtr(a.DeltaEnabled), + DeltaDescription: interfaceToStringPtr(a.DeltaDescription), + DeltaDisplayOrder: interfaceToFloat32Ptr(a.DeltaDisplayOrder), + 
CreatedAt: a.CreatedAt, + UpdatedAt: a.UpdatedAt, + } +} diff --git a/packages/server/pkg/service/sgraphql/reader.go b/packages/server/pkg/service/sgraphql/reader.go new file mode 100644 index 000000000..6f9e16c1d --- /dev/null +++ b/packages/server/pkg/service/sgraphql/reader.go @@ -0,0 +1,140 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + "fmt" + "log/slog" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type Reader struct { + queries *gen.Queries + logger *slog.Logger +} + +func NewReader(db *sql.DB, logger *slog.Logger) *Reader { + return &Reader{ + queries: gen.New(db), + logger: logger, + } +} + +func NewReaderFromQueries(queries *gen.Queries, logger *slog.Logger) *Reader { + return &Reader{ + queries: queries, + logger: logger, + } +} + +func (r *Reader) Get(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQL, error) { + gql, err := r.queries.GetGraphQL(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + if r.logger != nil { + r.logger.DebugContext(ctx, fmt.Sprintf("GraphQL ID: %s not found", id.String())) + } + return nil, ErrNoGraphQLFound + } + return nil, err + } + return ConvertToModelGraphQL(gql), nil +} + +func (r *Reader) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLsByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + workspaceID, err := r.queries.GetGraphQLWorkspaceID(ctx, id) + if err != nil { + if errors.Is(err, 
sql.ErrNoRows) { + return idwrap.IDWrap{}, ErrNoGraphQLFound + } + return idwrap.IDWrap{}, err + } + return workspaceID, nil +} + +func (r *Reader) GetDeltasByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLDeltasByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetDeltasByParentID(ctx context.Context, parentID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + gqls, err := r.queries.GetGraphQLDeltasByParentID(ctx, parentID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQL{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQL, len(gqls)) + for i, gql := range gqls { + result[i] = *ConvertToModelGraphQL(gql) + } + return result, nil +} + +func (r *Reader) GetGraphQLVersionsByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLVersion, error) { + versions, err := r.queries.GetGraphQLVersionsByGraphQLID(ctx, graphqlID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLVersion{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLVersion, len(versions)) + for i, v := range versions { + var createdBy *idwrap.IDWrap + if len(v.CreatedBy) > 0 { + id, err := idwrap.NewFromBytes(v.CreatedBy) + if err == nil { + createdBy = &id + } + } + + id, _ := idwrap.NewFromBytes(v.ID) + gqlID, _ := idwrap.NewFromBytes(v.GraphqlID) + + result[i] = mgraphql.GraphQLVersion{ + ID: id, + GraphQLID: gqlID, + VersionName: v.VersionName, + VersionDescription: v.VersionDescription, + IsActive: v.IsActive, + CreatedAt: v.CreatedAt, + CreatedBy: createdBy, + } + } + return result, nil +} diff --git 
a/packages/server/pkg/service/sgraphql/response.go b/packages/server/pkg/service/sgraphql/response.go new file mode 100644 index 000000000..4373c2797 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/response.go @@ -0,0 +1,184 @@ +package sgraphql + +import ( + "context" + "database/sql" + "errors" + "time" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLResponseFound = errors.New("no graphql response found") + +type GraphQLResponseService struct { + queries *gen.Queries +} + +func NewGraphQLResponseService(queries *gen.Queries) GraphQLResponseService { + return GraphQLResponseService{queries: queries} +} + +func (s GraphQLResponseService) TX(tx *sql.Tx) GraphQLResponseService { + return GraphQLResponseService{queries: s.queries.WithTx(tx)} +} + +func (s GraphQLResponseService) Create(ctx context.Context, resp mgraphql.GraphQLResponse) error { + return s.queries.CreateGraphQLResponse(ctx, gen.CreateGraphQLResponseParams{ + ID: resp.ID, + GraphqlID: resp.GraphQLID, + Status: resp.Status, + Body: resp.Body, + Time: time.Unix(resp.Time, 0), + Duration: resp.Duration, + Size: resp.Size, + CreatedAt: resp.CreatedAt, + }) +} + +func (s GraphQLResponseService) GetByGraphQLID(ctx context.Context, graphqlID idwrap.IDWrap) ([]mgraphql.GraphQLResponse, error) { + responses, err := s.queries.GetGraphQLResponsesByGraphQLID(ctx, graphqlID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponse{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponse, len(responses)) + for i, resp := range responses { + result[i] = ConvertToModelGraphQLResponse(resp) + } + return result, nil +} + +func (s GraphQLResponseService) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponse, error) { + responses, err := 
s.queries.GetGraphQLResponsesByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponse{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponse, len(responses)) + for i, resp := range responses { + result[i] = ConvertToModelGraphQLResponse(resp) + } + return result, nil +} + +func (s GraphQLResponseService) CreateHeader(ctx context.Context, header mgraphql.GraphQLResponseHeader) error { + return s.queries.CreateGraphQLResponseHeader(ctx, gen.CreateGraphQLResponseHeaderParams{ + ID: header.ID, + ResponseID: header.ResponseID, + Key: header.HeaderKey, + Value: header.HeaderValue, + CreatedAt: header.CreatedAt, + }) +} + +func (s GraphQLResponseService) GetHeadersByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponseHeader, error) { + headers, err := s.queries.GetGraphQLResponseHeadersByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseHeader{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + result[i] = mgraphql.GraphQLResponseHeader{ + ID: h.ID, + ResponseID: h.ResponseID, + HeaderKey: h.Key, + HeaderValue: h.Value, + CreatedAt: h.CreatedAt, + } + } + return result, nil +} + +func (s GraphQLResponseService) GetHeadersByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]mgraphql.GraphQLResponseHeader, error) { + headers, err := s.queries.GetGraphQLResponseHeadersByResponseID(ctx, responseID) + if err != nil { + return nil, err + } + + result := make([]mgraphql.GraphQLResponseHeader, len(headers)) + for i, h := range headers { + result[i] = mgraphql.GraphQLResponseHeader{ + ID: h.ID, + ResponseID: h.ResponseID, + HeaderKey: h.Key, + HeaderValue: h.Value, + CreatedAt: h.CreatedAt, + } + } + return result, nil +} + + +func (s GraphQLResponseService) CreateAssert(ctx context.Context, assert 
mgraphql.GraphQLResponseAssert) error { + return s.queries.CreateGraphQLResponseAssert(ctx, gen.CreateGraphQLResponseAssertParams{ + ID: assert.ID.Bytes(), + ResponseID: assert.ResponseID.Bytes(), + Value: assert.Value, + Success: assert.Success, + CreatedAt: assert.CreatedAt, + }) +} + +func (s GraphQLResponseService) GetAssertsByResponseID(ctx context.Context, responseID idwrap.IDWrap) ([]mgraphql.GraphQLResponseAssert, error) { + asserts, err := s.queries.GetGraphQLResponseAssertsByResponseID(ctx, responseID.Bytes()) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseAssert, len(asserts)) + for i, a := range asserts { + id, _ := idwrap.NewFromBytes(a.ID) + respID, _ := idwrap.NewFromBytes(a.ResponseID) + + result[i] = mgraphql.GraphQLResponseAssert{ + ID: id, + ResponseID: respID, + Value: a.Value, + Success: a.Success, + CreatedAt: a.CreatedAt, + } + } + return result, nil +} + +func (s GraphQLResponseService) GetAssertsByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQLResponseAssert, error) { + asserts, err := s.queries.GetGraphQLResponseAssertsByWorkspaceID(ctx, workspaceID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return []mgraphql.GraphQLResponseAssert{}, nil + } + return nil, err + } + + result := make([]mgraphql.GraphQLResponseAssert, len(asserts)) + for i, a := range asserts { + id, _ := idwrap.NewFromBytes(a.ID) + respID, _ := idwrap.NewFromBytes(a.ResponseID) + + result[i] = mgraphql.GraphQLResponseAssert{ + ID: id, + ResponseID: respID, + Value: a.Value, + Success: a.Success, + CreatedAt: a.CreatedAt, + } + } + return result, nil +} + diff --git a/packages/server/pkg/service/sgraphql/sgraphql.go b/packages/server/pkg/service/sgraphql/sgraphql.go new file mode 100644 index 000000000..120f02f65 --- /dev/null +++ b/packages/server/pkg/service/sgraphql/sgraphql.go @@ -0,0 +1,62 @@ 
+package sgraphql + +import ( + "context" + "database/sql" + "log/slog" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +var ErrNoGraphQLFound = sql.ErrNoRows + +type GraphQLService struct { + reader *Reader + queries *gen.Queries + logger *slog.Logger +} + +func New(queries *gen.Queries, logger *slog.Logger) GraphQLService { + return GraphQLService{ + reader: NewReaderFromQueries(queries, logger), + queries: queries, + logger: logger, + } +} + +func (s GraphQLService) TX(tx *sql.Tx) GraphQLService { + newQueries := s.queries.WithTx(tx) + return GraphQLService{ + reader: NewReaderFromQueries(newQueries, s.logger), + queries: newQueries, + logger: s.logger, + } +} + +func (s GraphQLService) Create(ctx context.Context, gql *mgraphql.GraphQL) error { + return NewWriterFromQueries(s.queries).Create(ctx, gql) +} + +func (s GraphQLService) Get(ctx context.Context, id idwrap.IDWrap) (*mgraphql.GraphQL, error) { + return s.reader.Get(ctx, id) +} + +func (s GraphQLService) GetByWorkspaceID(ctx context.Context, workspaceID idwrap.IDWrap) ([]mgraphql.GraphQL, error) { + return s.reader.GetByWorkspaceID(ctx, workspaceID) +} + +func (s GraphQLService) GetWorkspaceID(ctx context.Context, id idwrap.IDWrap) (idwrap.IDWrap, error) { + return s.reader.GetWorkspaceID(ctx, id) +} + +func (s GraphQLService) Update(ctx context.Context, gql *mgraphql.GraphQL) error { + return NewWriterFromQueries(s.queries).Update(ctx, gql) +} + +func (s GraphQLService) Delete(ctx context.Context, id idwrap.IDWrap) error { + return NewWriterFromQueries(s.queries).Delete(ctx, id) +} + +func (s GraphQLService) Reader() *Reader { return s.reader } diff --git a/packages/server/pkg/service/sgraphql/writer.go b/packages/server/pkg/service/sgraphql/writer.go new file mode 100644 index 000000000..35ccc2552 --- /dev/null +++ 
b/packages/server/pkg/service/sgraphql/writer.go @@ -0,0 +1,96 @@ +package sgraphql + +import ( + "context" + + "github.com/the-dev-tools/dev-tools/packages/db/pkg/sqlc/gen" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/dbtime" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" +) + +type Writer struct { + queries *gen.Queries +} + +func NewWriterFromQueries(queries *gen.Queries) *Writer { + return &Writer{queries: queries} +} + +func (w *Writer) Create(ctx context.Context, gql *mgraphql.GraphQL) error { + now := dbtime.DBNow() + gql.CreatedAt = now.Unix() + gql.UpdatedAt = now.Unix() + + dbGQL := ConvertToDBGraphQL(*gql) + return w.queries.CreateGraphQL(ctx, gen.CreateGraphQLParams(dbGQL)) +} + +func (w *Writer) Update(ctx context.Context, gql *mgraphql.GraphQL) error { + gql.UpdatedAt = dbtime.DBNow().Unix() + + dbGQL := ConvertToDBGraphQL(*gql) + + if gql.IsDelta { + // Update delta fields for delta records + if err := w.queries.UpdateGraphQLDelta(ctx, gen.UpdateGraphQLDeltaParams{ + ID: dbGQL.ID, + DeltaName: dbGQL.DeltaName, + DeltaUrl: dbGQL.DeltaUrl, + DeltaQuery: dbGQL.DeltaQuery, + DeltaVariables: dbGQL.DeltaVariables, + DeltaDescription: dbGQL.DeltaDescription, + }); err != nil { + return err + } + // Fallthrough to update common fields (like LastRunAt) + } + + var lastRunAt interface{} + if gql.LastRunAt != nil { + lastRunAt = *gql.LastRunAt + } + + // Update base fields + return w.queries.UpdateGraphQL(ctx, gen.UpdateGraphQLParams{ + ID: gql.ID, + Name: gql.Name, + Url: gql.Url, + Query: gql.Query, + Variables: gql.Variables, + Description: gql.Description, + LastRunAt: lastRunAt, + }) +} + +func (w *Writer) Delete(ctx context.Context, id idwrap.IDWrap) error { + return w.queries.DeleteGraphQL(ctx, id) +} + +func (w *Writer) CreateGraphQLVersion(ctx context.Context, graphqlID, createdBy idwrap.IDWrap, versionName, versionDescription string) 
(*mgraphql.GraphQLVersion, error) { + id := idwrap.NewNow() + now := dbtime.DBNow().Unix() + + err := w.queries.CreateGraphQLVersion(ctx, gen.CreateGraphQLVersionParams{ + ID: id.Bytes(), + GraphqlID: graphqlID.Bytes(), + VersionName: versionName, + VersionDescription: versionDescription, + IsActive: true, + CreatedAt: now, + CreatedBy: createdBy.Bytes(), + }) + if err != nil { + return nil, err + } + + return &mgraphql.GraphQLVersion{ + ID: id, + GraphQLID: graphqlID, + VersionName: versionName, + VersionDescription: versionDescription, + IsActive: true, + CreatedAt: now, + CreatedBy: &createdBy, + }, nil +} diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter.go b/packages/server/pkg/translate/yamlflowsimplev2/converter.go index 290391f30..60cad4751 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter.go @@ -55,9 +55,17 @@ func ConvertSimplifiedYAML(data []byte, opts ConvertOptionsV2) (*ioworkspace.Wor } } + // Prepare GraphQL request templates + graphqlTemplates := make(map[string]YamlGraphQLDefV2) + for _, gql := range yamlFormat.GraphQLRequests { + if gql.Name != "" { + graphqlTemplates[gql.Name] = gql + } + } + // Process flows and generate HTTP requests for _, flowEntry := range yamlFormat.Flows { - flowData, err := processFlow(flowEntry, yamlFormat.Run, requestTemplates, opts) + flowData, err := processFlow(flowEntry, yamlFormat.Run, requestTemplates, graphqlTemplates, opts) if err != nil { return nil, fmt.Errorf("failed to process flow '%s': %w", flowEntry.Name, err) } diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go b/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go index 405692dcd..a50d19e3a 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter_flow.go @@ -15,7 +15,7 @@ import ( ) // processFlow processes a single flow and 
returns the generated data -func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, templates map[string]YamlRequestDefV2, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) { +func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, templates map[string]YamlRequestDefV2, graphqlTemplates map[string]YamlGraphQLDefV2, opts ConvertOptionsV2) (*ioworkspace.WorkspaceBundle, error) { result := &ioworkspace.WorkspaceBundle{} flowID := idwrap.NewNow() @@ -68,7 +68,7 @@ func processFlow(flowEntry YamlFlowFlowV2, runEntries []YamlRunEntryV2, template startNodeID := idwrap.NewNow() // Process steps - processRes, err := processSteps(flowEntry, templates, varMap, flowID, startNodeID, opts, result) + processRes, err := processSteps(flowEntry, templates, graphqlTemplates, varMap, flowID, startNodeID, opts, result) if err != nil { return nil, fmt.Errorf("failed to process steps: %w", err) } @@ -270,6 +270,11 @@ func mergeFlowData(result *ioworkspace.WorkspaceBundle, flowData *ioworkspace.Wo result.FlowAINodes = append(result.FlowAINodes, flowData.FlowAINodes...) result.FlowAIProviderNodes = append(result.FlowAIProviderNodes, flowData.FlowAIProviderNodes...) result.FlowAIMemoryNodes = append(result.FlowAIMemoryNodes, flowData.FlowAIMemoryNodes...) + + result.GraphQLRequests = append(result.GraphQLRequests, flowData.GraphQLRequests...) + result.GraphQLHeaders = append(result.GraphQLHeaders, flowData.GraphQLHeaders...) + result.GraphQLAsserts = append(result.GraphQLAsserts, flowData.GraphQLAsserts...) + result.FlowGraphQLNodes = append(result.FlowGraphQLNodes, flowData.FlowGraphQLNodes...) 
} func mergeAssociatedData(result *ioworkspace.WorkspaceBundle, assoc *HTTPAssociatedData) { diff --git a/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go b/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go index 31057b089..2089851d9 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/converter_node.go @@ -11,6 +11,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcondition" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "github.com/the-dev-tools/dev-tools/packages/server/pkg/varsystem" ) @@ -34,6 +35,8 @@ func getStepCommon(sw YamlStepWrapper) *YamlStepCommon { return &sw.AIProvider.YamlStepCommon case sw.AIMemory != nil: return &sw.AIMemory.YamlStepCommon + case sw.GraphQL != nil: + return &sw.GraphQL.YamlStepCommon case sw.ManualStart != nil: return sw.ManualStart default: @@ -53,7 +56,7 @@ func createStartNodeWithID(nodeID, flowID idwrap.IDWrap, result *ioworkspace.Wor } // processSteps processes all steps in a flow -func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV2, varMap varsystem.VarMap, flowID, startNodeID idwrap.IDWrap, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) (*StepProcessingResult, error) { +func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV2, graphqlTemplates map[string]YamlGraphQLDefV2, varMap varsystem.VarMap, flowID, startNodeID idwrap.IDWrap, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) (*StepProcessingResult, error) { nodeInfoMap := make(map[string]*nodeInfo) nodeList := make([]*nodeInfo, 0) startNodeFound := false @@ -69,6 +72,9 @@ func processSteps(flowEntry YamlFlowFlowV2, templates 
map[string]YamlRequestDefV case stepWrapper.Request != nil: nodeName = stepWrapper.Request.Name dependsOn = stepWrapper.Request.DependsOn + case stepWrapper.GraphQL != nil: + nodeName = stepWrapper.GraphQL.Name + dependsOn = stepWrapper.GraphQL.DependsOn case stepWrapper.If != nil: nodeName = stepWrapper.If.Name dependsOn = stepWrapper.If.DependsOn @@ -124,6 +130,10 @@ func processSteps(flowEntry YamlFlowFlowV2, templates map[string]YamlRequestDefV file := createFileForHTTP(*httpReq, opts) result.Files = append(result.Files, file) } + case stepWrapper.GraphQL != nil: + if err := processGraphQLStructStep(stepWrapper.GraphQL, nodeID, flowID, graphqlTemplates, opts, result); err != nil { + return nil, err + } case stepWrapper.If != nil: if stepWrapper.If.Condition == "" { return nil, NewYamlFlowErrorV2("missing required condition", "if", i) @@ -483,3 +493,113 @@ func processAIMemoryStructStep(step *YamlStepAIMemory, nodeID, flowID idwrap.IDW result.FlowAIMemoryNodes = append(result.FlowAIMemoryNodes, memoryNode) return nil } + +func processGraphQLStructStep(step *YamlStepGraphQL, nodeID, flowID idwrap.IDWrap, templates map[string]YamlGraphQLDefV2, opts ConvertOptionsV2, result *ioworkspace.WorkspaceBundle) error { + url := step.URL + query := step.Query + variables := step.Variables + var headers HeaderMapOrSlice + var assertions AssertionsOrSlice + + if step.UseRequest != "" { + if tmpl, ok := templates[step.UseRequest]; ok { + if tmpl.URL != "" { + url = tmpl.URL + } + if tmpl.Query != "" { + query = tmpl.Query + } + if tmpl.Variables != "" { + variables = tmpl.Variables + } + headers = tmpl.Headers + assertions = tmpl.Assertions + } else { + return NewYamlFlowErrorV2(fmt.Sprintf("graphql step '%s' references unknown template '%s'", step.Name, step.UseRequest), "use_request", step.UseRequest) + } + } + + // Step-level values override template + if step.URL != "" { + url = step.URL + } + if step.Query != "" { + query = step.Query + } + if step.Variables != "" { + 
variables = step.Variables + } + if len(step.Headers) > 0 { + headers = append(append(HeaderMapOrSlice(nil), headers...), step.Headers...) + } + if len(step.Assertions) > 0 { + assertions = append(append(AssertionsOrSlice(nil), assertions...), step.Assertions...) + } + + if url == "" { + return NewYamlFlowErrorV2(fmt.Sprintf("graphql step '%s' missing required url", step.Name), "url", nil) + } + + gqlID := idwrap.NewNow() + now := time.Now().Unix() + + gqlReq := mgraphql.GraphQL{ + ID: gqlID, + WorkspaceID: opts.WorkspaceID, + FolderID: opts.FolderID, + Name: step.Name, + Url: url, + Query: query, + Variables: variables, + CreatedAt: now, + UpdatedAt: now, + } + result.GraphQLRequests = append(result.GraphQLRequests, gqlReq) + + // Create headers + for i, h := range headers { + header := mgraphql.GraphQLHeader{ + ID: idwrap.NewNow(), + GraphQLID: gqlID, + Key: h.Name, + Value: h.Value, + Enabled: h.Enabled, + DisplayOrder: float32(i), + CreatedAt: now, + UpdatedAt: now, + } + result.GraphQLHeaders = append(result.GraphQLHeaders, header) + } + + // Create assertions + for i, a := range assertions { + assert := mgraphql.GraphQLAssert{ + ID: idwrap.NewNow(), + GraphQLID: gqlID, + Value: a.Expression, + Enabled: a.Enabled, + DisplayOrder: float32(i), + CreatedAt: now, + UpdatedAt: now, + } + result.GraphQLAsserts = append(result.GraphQLAsserts, assert) + } + + // Create flow node + flowNode := mflow.Node{ + ID: nodeID, + FlowID: flowID, + Name: step.Name, + NodeKind: mflow.NODE_KIND_GRAPHQL, + } + result.FlowNodes = append(result.FlowNodes, flowNode) + + // Create GraphQL node linking flow node to GraphQL entity + graphqlNode := mflow.NodeGraphQL{ + FlowNodeID: nodeID, + GraphQLID: &gqlID, + } + result.FlowGraphQLNodes = append(result.FlowGraphQLNodes, graphqlNode) + + return nil +} diff --git a/packages/server/pkg/translate/yamlflowsimplev2/exporter.go b/packages/server/pkg/translate/yamlflowsimplev2/exporter.go index 06f7c01ad..6ea9184ed 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/exporter.go +++ 
b/packages/server/pkg/translate/yamlflowsimplev2/exporter.go @@ -11,6 +11,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/ioworkspace" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mcredential" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mhttp" "gopkg.in/yaml.v3" @@ -117,6 +118,26 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { aiMemoryNodeMap[n.FlowNodeID] = n } + graphqlNodeMap := make(map[idwrap.IDWrap]mflow.NodeGraphQL) + for _, n := range data.FlowGraphQLNodes { + graphqlNodeMap[n.FlowNodeID] = n + } + + graphqlMap := make(map[idwrap.IDWrap]mgraphql.GraphQL) + for _, g := range data.GraphQLRequests { + graphqlMap[g.ID] = g + } + + graphqlHeadersMap := make(map[idwrap.IDWrap][]mgraphql.GraphQLHeader) + for _, h := range data.GraphQLHeaders { + graphqlHeadersMap[h.GraphQLID] = append(graphqlHeadersMap[h.GraphQLID], h) + } + + graphqlAssertsMap := make(map[idwrap.IDWrap][]mgraphql.GraphQLAssert) + for _, a := range data.GraphQLAsserts { + graphqlAssertsMap[a.GraphQLID] = append(graphqlAssertsMap[a.GraphQLID], a) + } + // Credential Map (ID -> Credential) credentialMap := make(map[idwrap.IDWrap]mcredential.Credential) for _, c := range data.Credentials { @@ -213,6 +234,74 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { yamlFormat.Requests = requests } + // 2b. 
Build top-level graphql_requests section + graphqlIDToRequestName := make(map[idwrap.IDWrap]string) + graphqlNameUsed := make(map[string]bool) + + // First pass: collect all GraphQL requests used in flows + for _, flow := range data.Flows { + for _, n := range data.FlowNodes { + if n.FlowID != flow.ID || n.NodeKind != mflow.NODE_KIND_GRAPHQL { + continue + } + gqlNode, ok := graphqlNodeMap[n.ID] + if !ok || gqlNode.GraphQLID == nil { + continue + } + gqlReq, ok := graphqlMap[*gqlNode.GraphQLID] + if !ok { + continue + } + + if _, exists := graphqlIDToRequestName[gqlReq.ID]; exists { + continue + } + + gqlName := gqlReq.Name + if gqlName == "" { + gqlName = "GraphQL Request" + } + + baseName := gqlName + counter := 1 + for graphqlNameUsed[gqlName] { + gqlName = fmt.Sprintf("%s_%d", baseName, counter) + counter++ + } + graphqlNameUsed[gqlName] = true + graphqlIDToRequestName[gqlReq.ID] = gqlName + } + } + + // Second pass: build the graphql_requests section + var graphqlRequests []YamlGraphQLDefV2 + var graphqlIDs []idwrap.IDWrap + for gqlID := range graphqlIDToRequestName { + graphqlIDs = append(graphqlIDs, gqlID) + } + sort.Slice(graphqlIDs, func(i, j int) bool { + return graphqlIDToRequestName[graphqlIDs[i]] < graphqlIDToRequestName[graphqlIDs[j]] + }) + + for _, gqlID := range graphqlIDs { + gqlName := graphqlIDToRequestName[gqlID] + gqlReq := graphqlMap[gqlID] + + gqlDef := YamlGraphQLDefV2{ + Name: gqlName, + URL: gqlReq.Url, + Query: gqlReq.Query, + Variables: gqlReq.Variables, + Headers: buildGraphQLHeaderMapOrSlice(graphqlHeadersMap[gqlID]), + Assertions: buildGraphQLAssertions(graphqlAssertsMap[gqlID]), + } + graphqlRequests = append(graphqlRequests, gqlDef) + } + + if len(graphqlRequests) > 0 { + yamlFormat.GraphQLRequests = graphqlRequests + } + // 3. 
Process each Flow flowNameUsed := make(map[string]bool) for _, flow := range data.Flows { @@ -448,6 +537,31 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { } stepWrapper.AIMemory = memoryStep + case mflow.NODE_KIND_GRAPHQL: + gqlNode, ok := graphqlNodeMap[node.ID] + if !ok || gqlNode.GraphQLID == nil { + continue + } + gqlReq, ok := graphqlMap[*gqlNode.GraphQLID] + if !ok { + continue + } + + gqlStep := &YamlStepGraphQL{ + YamlStepCommon: common, + } + + if gqlName, exists := graphqlIDToRequestName[gqlReq.ID]; exists { + gqlStep.UseRequest = gqlName + } else { + gqlStep.URL = gqlReq.Url + gqlStep.Query = gqlReq.Query + gqlStep.Variables = gqlReq.Variables + gqlStep.Headers = buildGraphQLHeaderMapOrSlice(graphqlHeadersMap[gqlReq.ID]) + gqlStep.Assertions = buildGraphQLAssertions(graphqlAssertsMap[gqlReq.ID]) + } + stepWrapper.GraphQL = gqlStep + case mflow.NODE_KIND_MANUAL_START: if node.ID == startNodeID { stepWrapper.ManualStart = &common @@ -459,7 +573,7 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { // Add to flow // Because stepWrapper has pointer fields, "empty" fields are nil // Checking if any field is set (simplified check, assume one set if we got here) - isValid := stepWrapper.Request != nil || stepWrapper.If != nil || stepWrapper.For != nil || + isValid := stepWrapper.Request != nil || stepWrapper.GraphQL != nil || stepWrapper.If != nil || stepWrapper.For != nil || stepWrapper.ForEach != nil || stepWrapper.JS != nil || stepWrapper.AI != nil || stepWrapper.AIProvider != nil || stepWrapper.AIMemory != nil || stepWrapper.ManualStart != nil if isValid { @@ -528,6 +642,33 @@ func MarshalSimplifiedYAML(data *ioworkspace.WorkspaceBundle) ([]byte, error) { return yaml.Marshal(yamlFormat) } +func buildGraphQLHeaderMapOrSlice(headers []mgraphql.GraphQLHeader) HeaderMapOrSlice { + if len(headers) == 0 { + return nil + } + var result []YamlNameValuePairV2 + for _, h := range headers { + result = 
append(result, YamlNameValuePairV2{ + Name: h.Key, + Value: h.Value, + Enabled: h.Enabled, + Description: h.Description, + }) + } + return HeaderMapOrSlice(result) +} + +func buildGraphQLAssertions(asserts []mgraphql.GraphQLAssert) AssertionsOrSlice { + if len(asserts) == 0 { + return nil + } + var result []YamlAssertionV2 + for _, a := range asserts { + result = append(result, YamlAssertionV2{Expression: a.Value, Enabled: a.Enabled}) + } + return AssertionsOrSlice(result) +} + type deltaLookupContext struct { httpMap map[idwrap.IDWrap]mhttp.HTTP headersMap map[idwrap.IDWrap][]mhttp.HTTPHeader diff --git a/packages/server/pkg/translate/yamlflowsimplev2/types.go b/packages/server/pkg/translate/yamlflowsimplev2/types.go index 1caf5ab20..8aa6f2322 100644 --- a/packages/server/pkg/translate/yamlflowsimplev2/types.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/types.go @@ -9,6 +9,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/pkg/compress" "github.com/the-dev-tools/dev-tools/packages/server/pkg/idwrap" "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mflow" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/model/mgraphql" ) // YamlFlowFormatV2 represents the modern YAML structure for simplified workflows @@ -20,6 +21,7 @@ type YamlFlowFormatV2 struct { Run []YamlRunEntryV2 `yaml:"run,omitempty"` RequestTemplates map[string]YamlRequestDefV2 `yaml:"request_templates,omitempty"` Requests []YamlRequestDefV2 `yaml:"requests,omitempty"` + GraphQLRequests []YamlGraphQLDefV2 `yaml:"graphql_requests,omitempty"` Flows []YamlFlowFlowV2 `yaml:"flows"` Environments []YamlEnvironmentV2 `yaml:"environments,omitempty"` } @@ -51,6 +53,16 @@ type YamlRequestDefV2 struct { Description string `yaml:"description,omitempty"` } +// YamlGraphQLDefV2 represents a GraphQL request definition (template or standalone) +type YamlGraphQLDefV2 struct { + Name string `yaml:"name,omitempty"` + URL string `yaml:"url,omitempty"` + Query string 
`yaml:"query"` + Variables string `yaml:"variables,omitempty"` + Headers HeaderMapOrSlice `yaml:"headers,omitempty"` + Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` +} + // YamlFlowFlowV2 represents a flow in the modern YAML format type YamlFlowFlowV2 struct { Name string `yaml:"name"` @@ -64,6 +76,7 @@ type YamlFlowFlowV2 struct { // A step is a map with a single key that identifies the type type YamlStepWrapper struct { Request *YamlStepRequest `yaml:"request,omitempty"` + GraphQL *YamlStepGraphQL `yaml:"graphql,omitempty"` If *YamlStepIf `yaml:"if,omitempty"` For *YamlStepFor `yaml:"for,omitempty"` ForEach *YamlStepForEach `yaml:"for_each,omitempty"` @@ -93,6 +106,16 @@ type YamlStepRequest struct { Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` } +type YamlStepGraphQL struct { + YamlStepCommon `yaml:",inline"` + UseRequest string `yaml:"use_request,omitempty"` + URL string `yaml:"url,omitempty"` + Query string `yaml:"query,omitempty"` + Variables string `yaml:"variables,omitempty"` + Headers HeaderMapOrSlice `yaml:"headers,omitempty"` + Assertions AssertionsOrSlice `yaml:"assertions,omitempty"` +} + type YamlStepIf struct { YamlStepCommon `yaml:",inline"` Condition string `yaml:"condition"` @@ -382,6 +405,10 @@ type YamlFlowDataV2 struct { // HTTP request data HTTPRequests []YamlHTTPRequestV2 + // GraphQL request data + GraphQLRequests []mgraphql.GraphQL + GraphQLHeaders []mgraphql.GraphQLHeader + // Flow node implementations RequestNodes []mflow.NodeRequest ConditionNodes []mflow.NodeIf @@ -391,6 +418,7 @@ type YamlFlowDataV2 struct { AINodes []mflow.NodeAI AIProviderNodes []mflow.NodeAiProvider AIMemoryNodes []mflow.NodeMemory + GraphQLNodes []mflow.NodeGraphQL } // YamlVariableV2 represents a variable during parsing diff --git a/packages/server/pkg/translate/yamlflowsimplev2/utils.go b/packages/server/pkg/translate/yamlflowsimplev2/utils.go index f4f9d9880..f9dcaec30 100644 --- 
a/packages/server/pkg/translate/yamlflowsimplev2/utils.go +++ b/packages/server/pkg/translate/yamlflowsimplev2/utils.go @@ -246,6 +246,17 @@ func ValidateYAMLStructure(yamlFormat *YamlFlowFormatV2) error { } } + // Check for duplicate GraphQL request names + graphqlNames := make(map[string]bool) + for _, gql := range yamlFormat.GraphQLRequests { + if gql.Name != "" { + if graphqlNames[gql.Name] { + return NewYamlFlowErrorV2(fmt.Sprintf("duplicate graphql request name: %s", gql.Name), "graphql_requests", gql.Name) + } + graphqlNames[gql.Name] = true + } + } + // Check for flow dependencies that reference non-existent flows for _, runEntry := range yamlFormat.Run { flowName := runEntry.Flow diff --git a/packages/server/test/e2e_har_to_cli_test.go b/packages/server/test/e2e_har_to_cli_test.go index bc80a209e..c1120ae74 100644 --- a/packages/server/test/e2e_har_to_cli_test.go +++ b/packages/server/test/e2e_har_to_cli_test.go @@ -27,6 +27,7 @@ import ( "github.com/the-dev-tools/dev-tools/packages/server/internal/api/rimportv2" "github.com/the-dev-tools/dev-tools/packages/server/pkg/eventstream/memory" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/flowbuilder" + "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/ngraphql" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/node/nrequest" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner" "github.com/the-dev-tools/dev-tools/packages/server/pkg/flow/runner/flowlocalrunner" @@ -271,10 +272,14 @@ func TestE2E_HAR_To_CLI_Chain(t *testing.T) { nil, // NodeAIService nil, // NodeAiProviderService nil, // NodeMemoryService + nil, // NodeGraphQLService + nil, // GraphQLService + nil, // GraphQLHeaderService cli.Workspace, cli.Variable, cli.FlowVariable, res, + nil, // GraphQLResolver cli.Logger, nil, // LLMProviderFactory ) @@ -475,6 +480,16 @@ func executeFlow(ctx context.Context, flowPtr *mflow.Flow, c *cliServices, build }() defer close(requestRespChan) + gqlRespChan := 
make(chan ngraphql.NodeGraphQLSideResp, requestBufferSize) + go func() { + for resp := range gqlRespChan { + if resp.Done != nil { + close(resp.Done) + } + } + }() + defer close(gqlRespChan) + // Build flow node map flowNodeMap, startNodeID, err := builder.BuildNodes( ctx, @@ -483,6 +498,7 @@ func executeFlow(ctx context.Context, flowPtr *mflow.Flow, c *cliServices, build nodeTimeout, httpClient, requestRespChan, + gqlRespChan, nil, // No JS client needed for this test ) if err != nil { diff --git a/packages/spec/api/file-system.tsp b/packages/spec/api/file-system.tsp index b8d639bef..88fcdb0b7 100644 --- a/packages/spec/api/file-system.tsp +++ b/packages/spec/api/file-system.tsp @@ -8,6 +8,7 @@ enum FileKind { HttpDelta, Flow, Credential, + GraphQL, } @TanStackDB.collection diff --git a/packages/spec/api/flow.tsp b/packages/spec/api/flow.tsp index e905fda41..27c23414b 100644 --- a/packages/spec/api/flow.tsp +++ b/packages/spec/api/flow.tsp @@ -95,6 +95,7 @@ enum NodeKind { Ai, AiProvider, AiMemory, + GraphQL, } enum AiMemoryType { @@ -172,6 +173,22 @@ model NodeHttp { @foreignKey deltaHttpId?: Id; } +@AITools.mutationTool(#{ + operation: AITools.CrudOperation.Insert, + title: "Create GraphQL Node", + name: "CreateGraphQLNode", + parent: "Node", + exclude: #["kind", "graphqlId", "deltaGraphqlId"], + include: #[#{ fromModel: "GraphQL", fields: #["url", "query", "variables"] }], + description: "Create a new GraphQL request node that executes a GraphQL query or mutation against a GraphQL API endpoint.", +}) +@TanStackDB.collection +model NodeGraphQL { + @primaryKey nodeId: Id; + @foreignKey graphqlId: Id; + @foreignKey deltaGraphqlId?: Id; +} + enum ErrorHandling { Ignore, Break, @@ -328,5 +345,6 @@ model NodeExecution { input?: Protobuf.WellKnown.Json; output?: Protobuf.WellKnown.Json; httpResponseId?: Id; + graphqlResponseId?: Id; completedAt?: Protobuf.WellKnown.Timestamp; } diff --git a/packages/spec/api/graphql.tsp b/packages/spec/api/graphql.tsp new file mode 
100644 index 000000000..9be323de3 --- /dev/null +++ b/packages/spec/api/graphql.tsp @@ -0,0 +1,91 @@ +using DevTools; + +namespace Api.GraphQL; + +@withDelta +@TanStackDB.collection +model GraphQL { + @primaryKey graphqlId: Id; + name: string; + url: string; + query: string; + variables: string; + lastRunAt?: Protobuf.WellKnown.Timestamp; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLVersion { + @primaryKey graphqlVersionId: Id; + @foreignKey graphqlId: Id; + @foreignKey deltaGraphqlId?: Id; + name: string; + description: string; + createdAt: int64; +} + +@withDelta +@TanStackDB.collection +model GraphQLHeader { + @primaryKey graphqlHeaderId: Id; + ...CommonTableFields; +} + +@withDelta +@TanStackDB.collection +model GraphQLAssert { + @primaryKey graphqlAssertId: Id; + @foreignKey graphqlId: Id; + value: string; + enabled: boolean; + order: float32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponse { + @primaryKey graphqlResponseId: Id; + @foreignKey graphqlId: Id; + status: int32; + body: string; + time: Protobuf.WellKnown.Timestamp; + duration: int32; + size: int32; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseHeader { + @primaryKey graphqlResponseHeaderId: Id; + @foreignKey graphqlResponseId: Id; + key: string; + value: string; +} + +@TanStackDB.collection(#{ isReadOnly: true }) +model GraphQLResponseAssert { + @primaryKey graphqlResponseAssertId: Id; + @foreignKey graphqlResponseId: Id; + value: string; + success: boolean; +} + +model GraphQLRunRequest { + graphqlId: Id; +} + +op GraphQLRun(...GraphQLRunRequest): {}; + +model GraphQLDuplicateRequest { + graphqlId: Id; +} + +op GraphQLDuplicate(...GraphQLDuplicateRequest): {}; + +model GraphQLIntrospectRequest { + graphqlId: Id; +} + +model GraphQLIntrospectResponse { + sdl: string; + introspectionJson: string; +} + +op GraphQLIntrospect(...GraphQLIntrospectRequest): GraphQLIntrospectResponse; diff --git a/packages/spec/api/main.tsp 
b/packages/spec/api/main.tsp index bdfd47525..ae71a6e20 100644 --- a/packages/spec/api/main.tsp +++ b/packages/spec/api/main.tsp @@ -8,6 +8,7 @@ import "./environment.tsp"; import "./export.tsp"; import "./file-system.tsp"; import "./flow.tsp"; +import "./graphql.tsp"; import "./health.tsp"; import "./http.tsp"; import "./import.tsp"; diff --git a/packages/spec/api/reference.tsp b/packages/spec/api/reference.tsp index 2867af993..e3ecec5f1 100644 --- a/packages/spec/api/reference.tsp +++ b/packages/spec/api/reference.tsp @@ -45,6 +45,8 @@ model ReferenceContext { workspaceId?: Id; httpId?: Id; deltaHttpId?: Id; + graphqlId?: Id; + deltaGraphqlId?: Id; flowNodeId?: Id; }