refactor(typescript): convert index to es module (#227)
JustinBeckwith authored Oct 26, 2018
1 parent 180083d commit fcbc07b
Showing 19 changed files with 1,431 additions and 1,377 deletions.
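The same pattern runs through nearly every file below: `src/index` is now an ES module that exposes `BigQuery` as a named export rather than assigning the constructor to `module.exports`, so consumers destructure the import. A minimal sketch of the two consumption styles after this change (the constructor option shown is illustrative, following the README sample):

```typescript
// ES module / TypeScript style, as in the converted benchmark:
import {BigQuery} from '@google-cloud/bigquery';

// CommonJS style, as in the updated samples:
//   const {BigQuery} = require('@google-cloud/bigquery');

const bigquery = new BigQuery({projectId: 'YOUR_PROJECT_ID'});
```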
1 change: 1 addition & 0 deletions .eslintignore
@@ -1,3 +1,4 @@
 node_modules/*
 samples/node_modules/*
 src/**/doc/*
+build/*
2 changes: 1 addition & 1 deletion README.md
@@ -66,7 +66,7 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained].

 ```javascript
 // Imports the Google Cloud client library
-const BigQuery = require('@google-cloud/bigquery');
+const {BigQuery} = require('@google-cloud/bigquery');

 // Your Google Cloud Platform project ID
 const projectId = 'YOUR_PROJECT_ID';
8 changes: 4 additions & 4 deletions benchmark/bench.ts
@@ -16,17 +16,17 @@

 'use strict';

-const async = require('async');
-const fs = require('fs');
-const BigQuery = require('../src/index.js');
+import * as async from 'async';
+import * as fs from 'fs';
+import {BigQuery} from '../src';
 const env = require('../../../system-test/env.js');

 if (process.argv.length < 3) {
   throw new Error(`need query file; ` +
     `usage: '${process.argv[0]} ${process.argv[1]} <queries.json>'`);
 }

-const queryJson = fs.readFileSync(process.argv[2]);
+const queryJson = fs.readFileSync(process.argv[2], 'utf8');
 const queries = JSON.parse(queryJson);
 const client = new BigQuery(env);

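The added `'utf8'` argument is what keeps this file compiling after the conversion: without an encoding, `fs.readFileSync` returns a `Buffer`, and TypeScript types `JSON.parse` as taking a string (a `Buffer` happens to be coerced at runtime, but the compiler rejects it). A minimal sketch, with a placeholder filename:

```typescript
import * as fs from 'fs';

const asBuffer = fs.readFileSync('queries.json');         // inferred as Buffer
const asString = fs.readFileSync('queries.json', 'utf8'); // inferred as string

// JSON.parse(text: string) accepts only the second form without a cast.
const queries = JSON.parse(asString);
```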
6 changes: 3 additions & 3 deletions package.json
@@ -57,7 +57,6 @@
     "benchmark": "time node benchmark/bench.js benchmark/queries.json",
     "docs": "jsdoc -c .jsdoc.js",
     "lint": "eslint samples/",
-    "prettier": "prettier --write samples/*.js samples/*/*.js",
     "cover": "nyc --reporter=lcov mocha test/*.js && nyc report",
     "samples-test": "cd samples/ && npm link ../ && npm test && cd ../",
     "test": "mocha build/test",
@@ -66,12 +65,12 @@
     "check": "gts check",
     "clean": "gts clean",
     "compile": "tsc -p .",
-    "fix": "gts fix",
+    "fix": "eslint --fix '**/*.js'",
     "prepare": "npm run compile",
     "pretest": "npm run compile"
   },
   "dependencies": {
-    "@google-cloud/common": "^0.25.0",
+    "@google-cloud/common": "^0.26.0",
     "@google-cloud/paginator": "^0.1.2",
     "@google-cloud/promisify": "^0.3.0",
     "arrify": "^1.0.0",
@@ -87,6 +86,7 @@
     "@google-cloud/nodejs-repo-tools": "^2.2.3",
     "@google-cloud/storage": "^2.0.0",
     "@types/mocha": "^5.2.5",
+    "@types/sinon": "^5.0.5",
     "async": "^2.6.0",
     "codecov": "^3.0.0",
     "eslint": "^5.0.0",
6 changes: 3 additions & 3 deletions samples/datasets.js
@@ -18,7 +18,7 @@
 async function createDataset(datasetId, projectId) {
   // [START bigquery_create_dataset]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -38,7 +38,7 @@ async function createDataset(datasetId, projectId) {
 async function deleteDataset(datasetId, projectId) {
   // [START bigquery_delete_dataset]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -61,7 +61,7 @@ async function deleteDataset(datasetId, projectId) {
 async function listDatasets(projectId) {
   // [START bigquery_list_datasets]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
6 changes: 3 additions & 3 deletions samples/queries.js
@@ -19,7 +19,7 @@ async function queryStackOverflow() {
   // [START bigquery_simple_app_all]
   // [START bigquery_simple_app_deps]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   // [END bigquery_simple_app_deps]

   // [START bigquery_simple_app_client]
@@ -63,7 +63,7 @@ async function queryStackOverflow() {
 async function query() {
   // [START bigquery_query]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   // Creates a client
   const bigquery = new BigQuery();
@@ -94,7 +94,7 @@ async function query() {
 async function queryDisableCache() {
   // [START bigquery_query_no_cache]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   // Creates a client
   const bigquery = new BigQuery();
2 changes: 1 addition & 1 deletion samples/quickstart.js
@@ -17,7 +17,7 @@

 // [START bigquery_quickstart]
 // Imports the Google Cloud client library
-const BigQuery = require('@google-cloud/bigquery');
+const {BigQuery} = require('@google-cloud/bigquery');

 // Your Google Cloud Platform project ID
 const projectId = 'YOUR_PROJECT_ID';
2 changes: 1 addition & 1 deletion samples/system-test/datasets.test.js
@@ -15,7 +15,7 @@

 'use strict';

-const BigQuery = require(`@google-cloud/bigquery`);
+const {BigQuery} = require(`@google-cloud/bigquery`);
 const path = require(`path`);
 const test = require(`ava`);
 const tools = require(`@google-cloud/nodejs-repo-tools`);
6 changes: 4 additions & 2 deletions samples/system-test/quickstart.test.js
@@ -21,7 +21,7 @@ const test = require(`ava`);
 const tools = require(`@google-cloud/nodejs-repo-tools`);
 const uuid = require(`uuid`);

-const BigQuery = proxyquire(`@google-cloud/bigquery`, {});
+const {BigQuery} = proxyquire(`@google-cloud/bigquery`, {});
 const bigquery = new BigQuery();

 const expectedDatasetId = `my_new_dataset`;
@@ -67,7 +67,9 @@ test(`quickstart should create a dataset`, async t => {
   };

   proxyquire(`../quickstart`, {
-    '@google-cloud/bigquery': sinon.stub().returns(bigqueryMock),
+    '@google-cloud/bigquery': {
+      BigQuery: sinon.stub().returns(bigqueryMock),
+    },
   });
 });
 });
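The new mock shape follows directly from the export change: the module no longer *is* the constructor, it *has* one under the `BigQuery` key. A minimal sketch of the pattern (the fake client body is elided):

```javascript
const proxyquire = require('proxyquire');
const sinon = require('sinon');

// The quickstart does `const {BigQuery} = require('@google-cloud/bigquery')`,
// so the substitute module must be an object exposing a BigQuery property.
const fakeClient = {/* stub the methods the quickstart calls */};
proxyquire('../quickstart', {
  '@google-cloud/bigquery': {BigQuery: sinon.stub().returns(fakeClient)},
});
```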
4 changes: 1 addition & 3 deletions samples/system-test/tables.test.js
@@ -18,11 +18,9 @@
 const test = require(`ava`);
 const path = require(`path`);
 const uuid = require(`uuid`);
-
 const tools = require(`@google-cloud/nodejs-repo-tools`);
 const {Storage} = require(`@google-cloud/storage`);
-
-const BigQuery = require(`@google-cloud/bigquery`);
+const {BigQuery} = require(`@google-cloud/bigquery`);

 const storage = new Storage();

36 changes: 18 additions & 18 deletions samples/tables.js
@@ -18,7 +18,7 @@
 async function createTable(datasetId, tableId, schema, projectId) {
   // [START bigquery_create_table]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -46,7 +46,7 @@ async function createTable(datasetId, tableId, schema, projectId) {
 async function deleteTable(datasetId, tableId, projectId) {
   // [START bigquery_delete_table]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -71,7 +71,7 @@ async function deleteTable(datasetId, tableId, projectId) {
 async function listTables(datasetId, projectId) {
   // [START bigquery_list_tables]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -93,7 +93,7 @@ async function listTables(datasetId, projectId) {
 async function browseRows(datasetId, tableId, projectId) {
   // [START bigquery_browse_table]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -125,7 +125,7 @@ async function copyTable(
 ) {
   // [START bigquery_copy_table]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -158,7 +158,7 @@ async function copyTable(
 async function loadLocalFile(datasetId, tableId, filename, projectId) {
   // [START bigquery_load_from_file]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
@@ -190,7 +190,7 @@ async function loadLocalFile(datasetId, tableId, filename, projectId) {
 async function loadORCFromGCS(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_orc]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -239,7 +239,7 @@ async function loadORCFromGCS(datasetId, tableId, projectId) {
 async function loadParquetFromGCS(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_parquet]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -288,7 +288,7 @@ async function loadParquetFromGCS(datasetId, tableId, projectId) {
 function loadCSVFromGCS(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_csv]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -355,7 +355,7 @@ function loadCSVFromGCS(datasetId, tableId, projectId) {
 function loadJSONFromGCS(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_json]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -421,7 +421,7 @@ function loadJSONFromGCS(datasetId, tableId, projectId) {
 function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_csv_autodetect]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -483,7 +483,7 @@ function loadCSVFromGCSAutodetect(datasetId, tableId, projectId) {
 function loadJSONFromGCSAutodetect(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_json_autodetect]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -544,7 +544,7 @@ function loadJSONFromGCSAutodetect(datasetId, tableId, projectId) {
 function loadCSVFromGCSTruncate(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_csv_truncate]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -613,7 +613,7 @@ function loadCSVFromGCSTruncate(datasetId, tableId, projectId) {
 function loadJSONFromGCSTruncate(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_json_truncate]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -681,7 +681,7 @@ function loadJSONFromGCSTruncate(datasetId, tableId, projectId) {
 function loadParquetFromGCSTruncate(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_parquet_truncate]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -743,7 +743,7 @@ function loadParquetFromGCSTruncate(datasetId, tableId, projectId) {
 function loadOrcFromGCSTruncate(datasetId, tableId, projectId) {
   // [START bigquery_load_table_gcs_orc_truncate]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -811,7 +811,7 @@ function extractTableToGCS(
 ) {
   // [START bigquery_extract_table]
   // Imports the Google Cloud client libraries
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');
   const {Storage} = require('@google-cloud/storage');

   /**
@@ -858,7 +858,7 @@ function extractTableToGCS(
 function insertRowsAsStream(datasetId, tableId, rows, projectId) {
   // [START bigquery_table_insert_rows]
   // Imports the Google Cloud client library
-  const BigQuery = require('@google-cloud/bigquery');
+  const {BigQuery} = require('@google-cloud/bigquery');

   /**
    * TODO(developer): Uncomment the following lines before running the sample.
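Apart from the import line, the call shape of the streaming-insert sample is unchanged. A minimal sketch under the new named export (dataset ID, table ID, and rows are placeholders):

```javascript
const {BigQuery} = require('@google-cloud/bigquery');

const bigquery = new BigQuery();
const rows = [{name: 'Tokyo', post_abbr: 'TY'}];

// table.insert() streams the rows into the table.
bigquery
  .dataset('my_dataset')
  .table('my_table')
  .insert(rows)
  .then(() => console.log(`Inserted ${rows.length} rows`))
  .catch(console.error);
```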
