forked from GoogleCloudPlatform/DataflowSDK-examples
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtfidf_test.py
93 lines (79 loc) · 3.24 KB
/
tfidf_test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the TF-IDF example."""
import logging
import os
import re
import tempfile
import unittest
import apache_beam as beam
from apache_beam.examples.complete import tfidf
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
# Expected (word, document, tf-idf score) triples the pipeline should
# produce for the three-document sample corpus used by both tests below.
EXPECTED_RESULTS = set([
    ('ghi', '1.txt', 0.3662040962227032),
    ('abc', '1.txt', 0.0),
    ('abc', '3.txt', 0.0),
    ('abc', '2.txt', 0.0),
    ('def', '1.txt', 0.13515503603605478),
    ('def', '2.txt', 0.2027325540540822)])

# Matches one text-sink output line of the form "(u'word', ('N.txt', score))"
# and captures (word, file name, score).  The "u" string prefix is optional
# so the pattern matches both the Python 2 repr (u'word') and the Python 3
# repr ('word'); the original pattern required the u prefix and therefore
# matched nothing under Python 3.
EXPECTED_LINE_RE = r'\(u?\'([a-z]*)\', \(u?\'.*([0-9]\.txt)\', (.*)\)\)'
class TfIdfTest(unittest.TestCase):
def create_file(self, path, contents):
logging.info('Creating temp file: %s', path)
with open(path, 'w') as f:
f.write(contents)
def test_tfidf_transform(self):
with TestPipeline() as p:
uri_to_line = p | 'create sample' >> beam.Create(
[('1.txt', 'abc def ghi'),
('2.txt', 'abc def'),
('3.txt', 'abc')])
result = (
uri_to_line
| tfidf.TfIdf()
| beam.Map(lambda (word, (uri, tfidf)): (word, uri, tfidf)))
assert_that(result, equal_to(EXPECTED_RESULTS))
# Run the pipeline. Note that the assert_that above adds to the pipeline
# a check that the result PCollection contains expected values.
# To actually trigger the check the pipeline must be run (e.g. by
# exiting the with context).
def test_basics(self):
# Setup the files with expected content.
temp_folder = tempfile.mkdtemp()
self.create_file(os.path.join(temp_folder, '1.txt'), 'abc def ghi')
self.create_file(os.path.join(temp_folder, '2.txt'), 'abc def')
self.create_file(os.path.join(temp_folder, '3.txt'), 'abc')
tfidf.run([
'--uris=%s/*' % temp_folder,
'--output', os.path.join(temp_folder, 'result')])
# Parse result file and compare.
results = []
with open(os.path.join(temp_folder,
'result-00000-of-00001')) as result_file:
for line in result_file:
match = re.search(EXPECTED_LINE_RE, line)
logging.info('Result line: %s', line)
if match is not None:
results.append(
(match.group(1), match.group(2), float(match.group(3))))
logging.info('Computed results: %s', set(results))
self.assertEqual(set(results), EXPECTED_RESULTS)
if __name__ == '__main__':
  # Raise the root logger to INFO when run as a script so the temp-file
  # and result-line log messages emitted by the tests are visible.
  root_logger = logging.getLogger()
  root_logger.setLevel(logging.INFO)
  unittest.main()