lidian manoha.

Reputation: 99

Why am I encountering an error when deploying a Node.js function to gcloud from a zip or directly in the inline editor?

I want to build a Cloud Function, and I'm writing it in VS Code. I think I have included everything the function needs.

To test it, I installed @google-cloud/storage and it works perfectly on my machine; however, when I compress it into a zip and import it into GCP, the deployment gives me an error:

(Build failed: function.js does not exist; Error ID: 7485c5b6)

Yet I clearly set my exports.backup function as the entry point in GCP.
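For comparison, deploying the same source with the gcloud CLI instead of the console would look roughly like this (the runtime and HTTP trigger shown here are assumptions; the entry point matches exports.backup):

gcloud functions deploy backup \
  --runtime nodejs14 \
  --trigger-http \
  --entry-point backup \
  --source .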

Here is the code I'm trying to run; something must be missing, but I can't figure out what.

package.json:

{
  "name": "export-mysql",
  "version": "1.0.0",
  "description": "create backup database production",
  "main": "index.js",
  "scripts": {
    "backup": "functions-framework --target=backup"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "chalk": "^4.1.2",
    "dayjs": "^1.10.7",
    "dotenv": "^10.0.0",
    "googleapis": "^92.0.0",
    "@google-cloud/functions-framework": "^2.1.0"
  }
}
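With the backup script above, the Functions Framework can serve the function locally before any zip is built (by default it listens on http://localhost:8080):

npm install
npm run backup
curl http://localhost:8080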

code:

const { google } = require("googleapis");
const sqlAdmin = google.sqladmin("v1beta4");
const dayjs = require("dayjs");
const chalk = require("chalk");
const dotenv = require("dotenv");
const log = console.log;
const error = console.error;

dotenv.config({ path: "./config/.env" });
let respo = ""; // status message set by the export callback and logged by the handler
authorize(function (authClient) {
    const date = dayjs(Date.now()).format("YYYYMMDDHHmm");

    var request = {
        project: "project",
        instance: "database-prod",
        resource: {
            exportContext: {
                databases: ["database"],
                fileType: "SQL",
                kind: "sql#exportContext",
                uri: `gs://backup-database-pop/backup-prod-${date}.gz`,
            },
        },
        auth: authClient,
    };

    sqlAdmin.instances.export(request, function (err, response) {
        if (err) {
            error(chalk.yellow.bold(`Status: ${err.code}`));
            log(chalk.red.bold(`Message: ${err.message}`));

            return;
        }
        // TODO: Change code below to process the `response` object:
        // log(chalk.yellow.bold(`Status: ${response.status}`));
        log(chalk.greenBright.bold(`Database exported to bucket -> backup-database-pop, file: backup-prod-${date}.sql`));
        respo = `Database exported to bucket -> backup-database-pop, file: backup-prod-${date}.sql`;
        return respo;
        // log.log(JSON.stringify(response, null, 2));
    });
});
function authorize(callback) {
    google.auth
        .getClient({
            scopes: ["https://www.googleapis.com/auth/cloud-platform"],
        })
        .then((client) => {
            callback(client);
        })
        .catch((err) => {
            error(chalk.red.bold("authentication failed: ", err));
        });
}
exports.backup = (req, res) => {
    res.end();
    log(respo);
    log("Function complete!");
};
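As a side note, unrelated to the build error: the export above runs once at module load, and respo is filled in asynchronously, so the HTTP handler usually responds before it is set. A minimal sketch of running the export inside the handler instead (same placeholder project, instance, and bucket as above) might look like this:

const { google } = require("googleapis");
const dayjs = require("dayjs");
const sqlAdmin = google.sqladmin("v1beta4");

exports.backup = async (req, res) => {
    try {
        // Authenticate with Application Default Credentials
        const auth = await google.auth.getClient({
            scopes: ["https://www.googleapis.com/auth/cloud-platform"],
        });
        const date = dayjs(Date.now()).format("YYYYMMDDHHmm");
        // Without a callback, googleapis methods return a promise
        await sqlAdmin.instances.export({
            project: "project",
            instance: "database-prod",
            resource: {
                exportContext: {
                    databases: ["database"],
                    fileType: "SQL",
                    kind: "sql#exportContext",
                    uri: `gs://backup-database-pop/backup-prod-${date}.gz`,
                },
            },
            auth,
        });
        res.status(200).send(`Export started: backup-prod-${date}.gz`);
    } catch (err) {
        res.status(500).send(`Export failed: ${err.message}`);
    }
};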

And here is the structure of the folder that is zipped:

functionFolder/
  config/
    .env
  index.js
  package.json
  package-lock.json
  authorize.json

Upvotes: 3

Views: 384

Answers (1)

lidian manoha.

Reputation: 99

Here is the solution: you have to select the files themselves and compress them, not compress the parent folder.
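For example, assuming a Unix-like shell, building the archive from inside functionFolder puts index.js and package.json at the root of the zip, which is what the build expects:

cd functionFolder
zip -r function.zip .

If the parent folder itself is compressed, package.json and index.js end up one directory down inside the archive and the build cannot find them.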

Upvotes: 2
