Skip to content

Commit 9787c20

Browse files
CorieW and cabljac authored
feat(firestore-bigquery-export): add log level config param (#2330)
* feat(firestore-bigquery-export): add log level config param * feat(firestore-bigquery-export): added no logging option (silent) * perf(firestore-bigquery-export): improved performance of log levels code * refactor(firestore-bigquery-export): code shortening * chore(firestore-bigquery-export): format and docs * fix(firestore-bigquery-export): tests * chore(firestore-bigquery-export): make some info logs debug logs * chore(firestore-bigquery-export): made some changes based on feedback * feat(firestore-bigquery-export): add tests for logger * chore(firestore-bigquery-export): format * chore(firestore-bigquery-export): adjust for latest changes * chore(firestore-bigquery-export): test mocking adjustments * chore(firestore-bigquery-export): bump extension version and add changelog * fix(firestore-bigquery-export): converted a missed log to use new logger --------- Co-authored-by: Jacob Cable <[email protected]>
1 parent 5bb2e2a commit 9787c20

File tree

10 files changed

+96
-39
lines changed

10 files changed

+96
-39
lines changed

firestore-bigquery-export/CHANGELOG.md

+4
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
## Version 0.1.60
2+
3+
feat - configure a log level to control the verbosity of logs.
4+
15
## Version 0.1.59
26

37
docs - remove references to lifecycle backfill feature.

firestore-bigquery-export/README.md

+2
Original file line numberDiff line numberDiff line change
@@ -297,6 +297,8 @@ Available schema extensions table fields for clustering include: `document_id, d
297297

298298
* Maximum number of enqueue attempts: This parameter will set the maximum number of attempts to enqueue a document to cloud tasks for export to BigQuery.
299299

300+
* Log level: The log level for the extension. The log level controls the verbosity of the extension's logs. The available log levels are: debug, info, warn, and error. To reduce the volume of logs, use a log level of warn or error.
301+
300302

301303

302304
**Cloud Functions:**

firestore-bigquery-export/extension.yaml

+22-1
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
# limitations under the License.
1414

1515
name: firestore-bigquery-export
16-
version: 0.1.59
16+
version: 0.1.60
1717
specVersion: v1beta
1818

1919
displayName: Stream Firestore to BigQuery
@@ -461,6 +461,27 @@ params:
461461
validationErrorMessage: Please select an integer between 1 and 10
462462
default: 3
463463

464+
- param: LOG_LEVEL
465+
label: Log level
466+
description: >-
467+
The log level for the extension. The log level controls the verbosity of
468+
the extension's logs. The available log levels are: debug, info, warn, and
469+
error. To reduce the volume of logs, use a log level of warn or error.
470+
type: select
471+
options:
472+
- label: Debug
473+
value: debug
474+
- label: Info
475+
value: info
476+
- label: Warn
477+
value: warn
478+
- label: Error
479+
value: error
480+
- label: Silent
481+
value: silent
482+
default: info
483+
required: true
484+
464485
events:
465486
# OLD event types for backward compatibility
466487
- type: firebase.extensions.firestore-counter.v1.onStart

firestore-bigquery-export/functions/__tests__/__snapshots__/config.test.ts.snap

+1
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ Object {
2121
"instanceId": undefined,
2222
"kmsKeyName": "test",
2323
"location": "us-central1",
24+
"logLevel": "info",
2425
"maxDispatchesPerSecond": 10,
2526
"maxEnqueueAttempts": 3,
2627
"maxStaleness": undefined,

firestore-bigquery-export/functions/__tests__/functions.test.ts

+15
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,21 @@ jest.mock("@firebaseextensions/firestore-bigquery-change-tracker", () => ({
1616
UPDATE: 1,
1717
CREATE: 0,
1818
},
19+
LogLevel: {
20+
DEBUG: "debug",
21+
INFO: "info",
22+
WARN: "warn",
23+
ERROR: "error",
24+
SILENT: "silent",
25+
},
26+
Logger: jest.fn().mockImplementation(() => ({
27+
debug: jest.fn(),
28+
info: jest.fn(),
29+
warn: jest.fn(),
30+
error: jest.fn(),
31+
log: jest.fn(),
32+
setLogLevel: jest.fn(),
33+
})),
1934
}));
2035

2136
jest.mock("firebase-admin/functions", () => ({

firestore-bigquery-export/functions/package-lock.json

+4-4
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

firestore-bigquery-export/functions/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
"author": "Jan Wyszynski <[email protected]>",
1414
"license": "Apache-2.0",
1515
"dependencies": {
16-
"@firebaseextensions/firestore-bigquery-change-tracker": "^1.1.39",
16+
"@firebaseextensions/firestore-bigquery-change-tracker": "^1.1.40",
1717
"@google-cloud/bigquery": "^7.6.0",
1818
"@types/chai": "^4.1.6",
1919
"@types/express-serve-static-core": "4.17.30",

firestore-bigquery-export/functions/src/config.ts

+2
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
* See the License for the specific language governing permissions and
1414
* limitations under the License.
1515
*/
16+
import { LogLevel } from "@firebaseextensions/firestore-bigquery-change-tracker";
1617

1718
function timePartitioning(type) {
1819
if (
@@ -73,4 +74,5 @@ export default {
7374
backupBucketName:
7475
process.env.BACKUP_GCS_BUCKET || `${process.env.PROJECT_ID}.appspot.com`,
7576
backupDir: `_${process.env.INSTANCE_ID || "firestore-bigquery-export"}`,
77+
logLevel: process.env.LOG_LEVEL || LogLevel.INFO,
7678
};

firestore-bigquery-export/functions/src/index.ts

+3-4
Original file line numberDiff line numberDiff line change
@@ -57,13 +57,15 @@ const eventTrackerConfig = {
5757
config.viewType === "materialized_incremental",
5858
maxStaleness: config.maxStaleness,
5959
refreshIntervalMinutes: config.refreshIntervalMinutes,
60+
logLevel: config.logLevel,
6061
};
6162

6263
// Initialize the Firestore Event History Tracker with the given configuration.
6364
const eventTracker: FirestoreBigQueryEventHistoryTracker =
6465
new FirestoreBigQueryEventHistoryTracker(eventTrackerConfig);
6566

6667
// Initialize logging.
68+
logs.logger.setLogLevel(config.logLevel);
6769
logs.init();
6870

6971
/** Initialize Firebase Admin SDK if not already initialized */
@@ -212,10 +214,7 @@ export const fsexportbigquery = functions.firestore
212214
context
213215
);
214216
} catch (err) {
215-
functions.logger.warn(
216-
"Failed to write event to BigQuery Immediately. Will attempt to Enqueue to Cloud Tasks.",
217-
err
218-
);
217+
logs.failedToWriteToBigQueryImmediately(err as Error);
219218
// Handle enqueue errors with retries and backup to GCS.
220219
await attemptToEnqueue(
221220
err,

firestore-bigquery-export/functions/src/logs.ts

+42-29
Original file line numberDiff line numberDiff line change
@@ -13,131 +13,137 @@
1313
* See the License for the specific language governing permissions and
1414
* limitations under the License.
1515
*/
16-
import { logger } from "firebase-functions";
1716
import config from "./config";
18-
import { ChangeType } from "@firebaseextensions/firestore-bigquery-change-tracker";
17+
import {
18+
ChangeType,
19+
Logger,
20+
} from "@firebaseextensions/firestore-bigquery-change-tracker";
21+
22+
export const logger = new Logger();
1923

2024
export const arrayFieldInvalid = (fieldName: string) => {
2125
logger.warn(`Array field '${fieldName}' does not contain an array, skipping`);
2226
};
2327

2428
export const bigQueryDatasetCreated = (datasetId: string) => {
25-
logger.log(`Created BigQuery dataset: ${datasetId}`);
29+
logger.info(`Created BigQuery dataset: ${datasetId}`);
2630
};
2731

2832
export const bigQueryDatasetCreating = (datasetId: string) => {
29-
logger.log(`Creating BigQuery dataset: ${datasetId}`);
33+
logger.debug(`Creating BigQuery dataset: ${datasetId}`);
3034
};
3135

3236
export const bigQueryDatasetExists = (datasetId: string) => {
33-
logger.log(`BigQuery dataset already exists: ${datasetId}`);
37+
logger.info(`BigQuery dataset already exists: ${datasetId}`);
3438
};
3539

3640
export const bigQueryErrorRecordingDocumentChange = (e: Error) => {
3741
logger.error(`Error recording document changes.`, e);
3842
};
3943

4044
export const bigQueryLatestSnapshotViewQueryCreated = (query: string) => {
41-
logger.log(`BigQuery latest snapshot view query:\n${query}`);
45+
logger.info(`BigQuery latest snapshot view query:\n${query}`);
4246
};
4347

4448
export const bigQueryTableAlreadyExists = (
4549
tableName: string,
4650
datasetName: string
4751
) => {
48-
logger.log(
52+
logger.info(
4953
`BigQuery table with name ${tableName} already ` +
5054
`exists in dataset ${datasetName}!`
5155
);
5256
};
5357

5458
export const bigQueryTableCreated = (tableName: string) => {
55-
logger.log(`Created BigQuery table: ${tableName}`);
59+
logger.info(`Created BigQuery table: ${tableName}`);
5660
};
5761

5862
export const bigQueryTableCreating = (tableName: string) => {
59-
logger.log(`Creating BigQuery table: ${tableName}`);
63+
logger.debug(`Creating BigQuery table: ${tableName}`);
6064
};
6165

6266
export const bigQueryTableUpdated = (tableName: string) => {
63-
logger.log(`Updated existing BigQuery table: ${tableName}`);
67+
logger.info(`Updated existing BigQuery table: ${tableName}`);
6468
};
6569

6670
export const bigQueryTableUpdating = (tableName: string) => {
67-
logger.log(`Updating existing BigQuery table: ${tableName}`);
71+
logger.debug(`Updating existing BigQuery table: ${tableName}`);
6872
};
6973

7074
export const bigQueryTableUpToDate = (tableName: string) => {
71-
logger.log(`BigQuery table: ${tableName} is up to date`);
75+
logger.info(`BigQuery table: ${tableName} is up to date`);
7276
};
7377

7478
export const bigQueryTableValidated = (tableName: string) => {
75-
logger.log(`Validated existing BigQuery table: ${tableName}`);
79+
logger.info(`Validated existing BigQuery table: ${tableName}`);
7680
};
7781

7882
export const bigQueryTableValidating = (tableName: string) => {
79-
logger.log(`Validating existing BigQuery table: ${tableName}`);
83+
logger.debug(`Validating existing BigQuery table: ${tableName}`);
8084
};
8185

8286
export const bigQueryUserDefinedFunctionCreating = (
8387
functionDefinition: string
8488
) => {
85-
logger.log(`Creating BigQuery User-defined Function:\n${functionDefinition}`);
89+
logger.debug(
90+
`Creating BigQuery User-defined Function:\n${functionDefinition}`
91+
);
8692
};
8793

8894
export const bigQueryUserDefinedFunctionCreated = (
8995
functionDefinition: string
9096
) => {
91-
logger.log(`Created BigQuery User-defined Function:\n${functionDefinition}`);
97+
logger.info(`Created BigQuery User-defined Function:\n${functionDefinition}`);
9298
};
9399

94100
export const bigQueryViewCreated = (viewName: string) => {
95-
logger.log(`Created BigQuery view: ${viewName}`);
101+
logger.info(`Created BigQuery view: ${viewName}`);
96102
};
97103

98104
export const bigQueryViewCreating = (viewName: string) => {
99-
logger.log(`Creating BigQuery view: ${viewName}`);
105+
logger.debug(`Creating BigQuery view: ${viewName}`);
100106
};
101107

102108
export const bigQueryViewAlreadyExists = (
103109
viewName: string,
104110
datasetName: string
105111
) => {
106-
logger.log(
112+
logger.info(
107113
`View with id ${viewName} already exists in dataset ${datasetName}.`
108114
);
109115
};
110116

111117
export const bigQueryViewUpdated = (viewName: string) => {
112-
logger.log(`Updated existing BigQuery view: ${viewName}`);
118+
logger.info(`Updated existing BigQuery view: ${viewName}`);
113119
};
114120

115121
export const bigQueryViewUpdating = (viewName: string) => {
116-
logger.log(`Updating existing BigQuery view: ${viewName}`);
122+
logger.debug(`Updating existing BigQuery view: ${viewName}`);
117123
};
118124

119125
export const bigQueryViewUpToDate = (viewName: string) => {
120-
logger.log(`BigQuery view: ${viewName} is up to date`);
126+
logger.info(`BigQuery view: ${viewName} is up to date`);
121127
};
122128

123129
export const bigQueryViewValidated = (viewName: string) => {
124-
logger.log(`Validated existing BigQuery view: ${viewName}`);
130+
logger.info(`Validated existing BigQuery view: ${viewName}`);
125131
};
126132

127133
export const bigQueryViewValidating = (viewName: string) => {
128-
logger.log(`Validating existing BigQuery view: ${viewName}`);
134+
logger.debug(`Validating existing BigQuery view: ${viewName}`);
129135
};
130136

131137
export const complete = () => {
132-
logger.log("Completed execution of extension");
138+
logger.info("Completed execution of extension");
133139
};
134140

135141
export const dataInserted = (rowCount: number) => {
136-
logger.log(`Inserted ${rowCount} row(s) of data into BigQuery`);
142+
logger.debug(`Inserted ${rowCount} row(s) of data into BigQuery`);
137143
};
138144

139145
export const dataInserting = (rowCount: number) => {
140-
logger.log(`Inserting ${rowCount} row(s) of data into BigQuery`);
146+
logger.debug(`Inserting ${rowCount} row(s) of data into BigQuery`);
141147
};
142148

143149
export const dataTypeInvalid = (
@@ -171,11 +177,11 @@ export const error = (
171177
};
172178

173179
export const init = () => {
174-
logger.log("Initializing extension with configuration", config);
180+
logger.info("Initializing extension with configuration", config);
175181
};
176182

177183
export const start = () => {
178-
logger.log("Started execution of extension with configuration", config);
184+
logger.info("Started execution of extension with configuration", config);
179185
};
180186

181187
export const timestampMissingValue = (fieldName: string) => {
@@ -218,3 +224,10 @@ export const logFailedEventAction = (
218224
error,
219225
});
220226
};
227+
228+
export const failedToWriteToBigQueryImmediately = (error: Error) => {
229+
logger.warn(
230+
"Failed to write event to BigQuery Immediately. Will attempt to Enqueue to Cloud Tasks.",
231+
error
232+
);
233+
};

0 commit comments

Comments (0)