-
Notifications
You must be signed in to change notification settings - Fork 4.5k
Expand file tree
/
Copy pathio_it_test.py
More file actions
116 lines (102 loc) · 4.39 KB
/
io_it_test.py
File metadata and controls
116 lines (102 loc) · 4.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration tests for Dataframe sources and sinks."""
# pytype: skip-file
import logging
import unittest
import pytest
import apache_beam.io.gcp.bigquery
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
_LOGGER = logging.getLogger(__name__)
# Probe for the GCP client libraries. When the optional GCP extras are not
# installed, GoogleAPICallError is set to None, which the skipIf decorator
# on the test class below uses to skip every test in this module.
try:
  from google.api_core.exceptions import GoogleAPICallError
except ImportError:
  # Sentinel value: GCP dependencies are unavailable in this environment.
  GoogleAPICallError = None
@unittest.skipIf(
    GoogleAPICallError is None, 'GCP dependencies are not installed')
class ReadUsingReadGbqTests(unittest.TestCase):
  """Integration tests for the dataframe ``read_gbq`` BigQuery source.

  Each test reads a small fixture table in the ``apache-beam-testing``
  project and asserts its contents, covering both the export-based path
  (``use_bqstorage_api=False``) and the BigQuery Storage direct-read path
  (``use_bqstorage_api=True``), addressed either by a fully-qualified
  table string or by separate project/dataset/table arguments.
  """

  # Fixture table coordinates, shared by every test so a fixture change
  # needs exactly one edit.
  _PROJECT = "apache-beam-testing"
  _DATASET = "beam_bigquery_io_test"
  _TABLE = "dfsqltable_3c7d6fd5_16e0460dfd0"
  # Fully-qualified form: "<project>:<dataset>.<table>".
  _FULL_TABLE = "%s:%s.%s" % (_PROJECT, _DATASET, _TABLE)
  # Rows expected in the fixture table, as (id, customer, type) tuples.
  _EXPECTED_ROWS = [(3, 'customer1', 'test'), (1, 'customer1', 'test'),
                    (2, 'customer2', 'test'), (4, 'customer2', 'test')]

  @pytest.mark.it_postcommit
  def test_ReadGbq(self):
    # Export-based read addressed by the fully-qualified table name.
    from apache_beam.dataframe import convert
    with TestPipeline(is_integration_test=True) as p:
      actual_df = p | apache_beam.dataframe.io.read_gbq(
          table=self._FULL_TABLE, use_bqstorage_api=False)
      assert_that(
          convert.to_pcollection(actual_df), equal_to(self._EXPECTED_ROWS))

  @pytest.mark.it_postcommit
  def test_ReadGbq_export_with_project(self):
    # Export-based read with project/dataset/table passed separately.
    from apache_beam.dataframe import convert
    with TestPipeline(is_integration_test=True) as p:
      actual_df = p | apache_beam.dataframe.io.read_gbq(
          table=self._TABLE,
          dataset=self._DATASET,
          project_id=self._PROJECT,
          use_bqstorage_api=False)
      assert_that(
          convert.to_pcollection(actual_df), equal_to(self._EXPECTED_ROWS))

  @pytest.mark.it_postcommit
  def test_ReadGbq_direct_read(self):
    # BigQuery Storage (direct) read addressed by the fully-qualified name.
    from apache_beam.dataframe import convert
    with TestPipeline(is_integration_test=True) as p:
      actual_df = p | apache_beam.dataframe.io.read_gbq(
          table=self._FULL_TABLE, use_bqstorage_api=True)
      assert_that(
          convert.to_pcollection(actual_df), equal_to(self._EXPECTED_ROWS))

  @pytest.mark.it_postcommit
  def test_ReadGbq_direct_read_with_project(self):
    # BigQuery Storage (direct) read with separate project/dataset/table.
    from apache_beam.dataframe import convert
    with TestPipeline(is_integration_test=True) as p:
      actual_df = p | apache_beam.dataframe.io.read_gbq(
          table=self._TABLE,
          dataset=self._DATASET,
          project_id=self._PROJECT,
          use_bqstorage_api=True)
      assert_that(
          convert.to_pcollection(actual_df), equal_to(self._EXPECTED_ROWS))

  @pytest.mark.it_postcommit
  def test_ReadGbq_with_computation(self):
    # Deferred-dataframe computation (groupby/count) on top of the read;
    # include_indexes=True surfaces the groupby key ('id') in the output.
    from apache_beam.dataframe import convert
    with TestPipeline(is_integration_test=True) as p:
      beam_df = p | apache_beam.dataframe.io.read_gbq(
          table=self._TABLE,
          dataset=self._DATASET,
          project_id=self._PROJECT)
      actual_df = beam_df.groupby('id').count()
      assert_that(
          convert.to_pcollection(actual_df, include_indexes=True),
          equal_to([(1, 1, 1), (2, 1, 1), (3, 1, 1), (4, 1, 1)]))
if __name__ == '__main__':
  # Surface INFO-level logs when the test module is executed directly,
  # then hand control to the unittest runner.
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()