Error while executing Dataflow template using Cloud Functions

Published 2019-03-05 17:35

Getting below error while trying to execute custom dataflow template using Google Cloud function.

Error:"problem running dataflow template, error was: { Error: A Forbidden error was returned while attempting to retrieve an access token for the Compute Engine built-in service account. This may be because the Compute Engine instance does not have the correct permission scopes specified. Could not refresh access token".

I have tried supplying all the required permissions and scopes. Could someone please suggest a resolution?

1 Answer

Answer #2 · 2019-03-05 18:00

The google-cloud node library does not yet support the Dataflow API, so the current way to use that API is the googleapis library.

Following the instructions there, I've tried to launch a Dataflow job with a Google-provided template using an HTTP-triggered function, and had no issues:

const {google} = require('googleapis');
const project = "your-project-id"

exports.launchDataflowTemplate = (req, res) => {
    let result;
    google.auth.getApplicationDefault(function(err, authClient, projectId) {
            if (err) {
                throw err;
            }
            if (authClient.createScopedRequired && authClient.createScopedRequired()) {
                authClient = authClient.createScoped([
                    'https://www.googleapis.com/auth/cloud-platform',
                    'https://www.googleapis.com/auth/compute',
                    'https://www.googleapis.com/auth/compute.readonly',
                    'https://www.googleapis.com/auth/userinfo.email'
                ]);
            }
            var dataflow = google.dataflow({
                version: "v1b3",
                auth: authClient
            });

            var launchParams = {
                "inputFilePattern": "gs://your-input-bucket/*.gz",
                "outputDirectory": "gs://your-result-bucket/",
                "outputFailureFile": "gs://your-logs-bucket/error.csv"
            };

            var env = {
               "tempLocation": "gs://your-staging-bucket/temp",
               "zone": "us-central1-f"
            }


            var opts = {
                projectId: project,
                gcsPath: "gs://dataflow-templates/latest/Bulk_Decompress_GCS_Files",
                resource: {
                    parameters: launchParams,
                    environment: env
                }
            };

            dataflow.projects.templates.launch(opts, (err, result) => {
                if (err) {
                    throw err;
                }
                res.send(result.data);
            });
    });
};
See more
Log in to post an answer