|
10 | 10 | }, |
11 | 11 | "outputs": [], |
12 | 12 | "source": [ |
13 | | - "# sql_engine: bigquery\n", |
14 | | - "# output_variable: df\n", |
15 | | - "# start _sql\n", |
16 | | - "_sql = \"\"\"\n", |
| 13 | + "from google.cloud import bigquery\n", |
| 14 | + "\n", |
| 15 | + "client = bigquery.Client()\n", |
| 16 | + "sql = \"\"\"\n", |
17 | 17 | "## [Restore deleted dataset](https://cloud.google.com/bigquery/docs/restore-deleted-datasets#restore_a_dataset)\n", |
18 | 18 | "UNDROP SCHEMA httparchive.crawl;\n", |
19 | | - "\"\"\" # end _sql\n", |
20 | | - "from google.colab.sql import bigquery as _bqsqlcell\n", |
21 | | - "df = _bqsqlcell.run(_sql)\n", |
22 | | - "df" |
| 19 | + "\"\"\"\n", |
| 20 | + "client.query(sql).result()" |
23 | 21 | ] |
24 | 22 | }, |
25 | 23 | { |
|
46 | 44 | }, |
47 | 45 | "outputs": [], |
48 | 46 | "source": [ |
49 | | - "# sql_engine: bigquery\n", |
50 | | - "# output_variable: df\n", |
51 | | - "# start _sql\n", |
52 | | - "_sql = \"\"\"\n", |
| 47 | + "from google.cloud import bigquery\n", |
| 48 | + "\n", |
| 49 | + "client = bigquery.Client()\n", |
| 50 | + "sql = \"\"\"\n", |
53 | 51 | "## [Restore a table to a specific point in time](https://cloud.google.com/bigquery/docs/restore-tables#restoring_a_table_to_a_specific_point_in_time)\n", |
54 | 52 | "CREATE TABLE httparchive.crawl_staging.pages_restored_20250804 AS\n", |
55 | 53 | "SELECT *\n", |
56 | 54 | "FROM httparchive.crawl.pages\n", |
57 | 55 | " FOR SYSTEM_TIME AS OF TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 HOUR);\n", |
58 | | - "\"\"\" # end _sql\n", |
59 | | - "from google.colab.sql import bigquery as _bqsqlcell\n", |
60 | | - "df = _bqsqlcell.run(_sql)\n", |
61 | | - "df" |
| 56 | + "\"\"\"\n", |
| 57 | + "client.query(sql).result()" |
62 | 58 | ] |
63 | 59 | } |
64 | 60 | ], |
|
0 commit comments