dist: trusty
language: scala
scala:
- 2.11.2
sudo: required
services:
- docker
env:
  global:
  - PYENV_VERSION=3.6
  - SPARK_HOME=./spark
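# The global env selects the Python version (via pyenv) and points SPARK_HOME
# at a local Spark distribution; both are presumably consumed by the install
# scripts and the CLI wrappers (./hash, ./query, ./report). Each entry under
# matrix.include below is an independent Travis job; fast_finish reports the
# build result as soon as the remaining jobs can no longer change it.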
matrix:
  fast_finish: true
  include:
  - name: 'Integration tests: SBT + Apache Spark local (DB, FE)'
    before_install:
    - docker-compose -f docker-compose.yml -f docker-compose.host.yml up -d scylla bblfshd
    - docker exec -it gemini_bblfshd_1 bblfshctl driver list
    - ./scripts/install_python_feature_extractor.sh
    - tar -xzf src/test/resources/weighted-minhash/csv.tar.gz -C src/test/resources/weighted-minhash/
    - ./scripts/install_python_report.sh
    script:
    # start dependencies
    - VIRTUAL_ENV=None IN_BACKGROUND=1 ./feature_extractor
    # run the whole test suite
    - ./sbt test
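    # Deployment is attached to this job only: tagged builds upload a release
    # tarball to GitHub and push the "latest" Docker image, while pushes to the
    # "experimental" branch publish an image via `make docker-push`.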
    before_deploy:
    - make build || travis_terminate 1
    - VERSION=$TRAVIS_TAG ./scripts/release.sh
    deploy:
    - provider: releases
      api_key:
        secure: $GITHUB_TOKEN
      file_glob: true
      file: gemini_$TRAVIS_TAG.tar.gz
      skip_cleanup: true
      on:
        tags: true
    - provider: script
      script: make docker-push-latest
      on:
        tags: true
    - provider: script
      script: make docker-push
      on:
        branch: experimental
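  # This job drives the gemini CLI end to end against a standalone Spark
  # cluster started on localhost (MASTER below) instead of Spark local mode.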
  - name: 'Integration tests: CLI + Apache Spark standalone cluster'
    env:
    - MASTER=spark://127.0.0.1:7077
    before_install:
    - ./scripts/install_python_report.sh
    - ./scripts/install_python_feature_extractor.sh
    - docker-compose -f docker-compose.yml -f docker-compose.host.yml up -d scylla bblfshd
    - ./scripts/get_apache_spark.sh "2.2.0" "2.7" || travis_terminate 1
    install:
    - make build
    script:
    # start dependencies
    - ./scripts/start_apache_spark_cluster.sh "127.0.0.1" "7077" || travis_terminate 1
    - VIRTUAL_ENV=None IN_BACKGROUND=1 ./feature_extractor
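    # End-to-end smoke test: hash the fixture repositories, then check that
    # queries and reports surface both exact duplicates and similar
    # files/functions (the grep patterns below match the expected CLI output).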
    # hash the test repositories
    - ./hash src/test/resources/siva || travis_terminate 1
    # query without similarity
    - ./query ./src/test/resources/LICENSE
    # report without similarity
    - ./report
    # query with similarity
    - ./query ./src/test/resources/consumer.go | tee query_result.txt
    # check that both identical & similar files appear in the output
    - grep "Duplicates of ./src/test/resources/consumer.go" query_result.txt > /dev/null
    - grep "Similar files of ./src/test/resources/consumer.go" query_result.txt > /dev/null
    # hash functions
    - ./hash -m func ./src/test/resources/siva/duplicate-funcs || travis_terminate 1
    # query with similarity (funcs)
    - ./query -m func ./src/test/resources/func_mod.go | tee query_result.txt
    - grep "Similar funcs of ./src/test/resources/func_mod.go" query_result.txt > /dev/null
    # query with filter (funcs)
    - ./query -m func "./src/test/resources/func_mod.go:B:12" | tee query_result.txt
    - grep "Similar funcs of ./src/test/resources/func_mod.go:B:12" query_result.txt > /dev/null
    # report with similarity
    - ./report | tee report_result.txt
    # check that both identical & similar files/functions appear in the output
    - grep "2 duplicates" report_result.txt > /dev/null
    - grep "2 similar files" report_result.txt > /dev/null
    - grep "2 similar functions" report_result.txt > /dev/null
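  # The feature-extractor contract is checked from both sides: pytest runs
  # inside the featurext container, and the Scala suites tagged FEIntegration
  # presumably talk to the same service.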
  - name: 'Integration tests: Python + Scala for Feature Extractor'
    script:
    - docker-compose -f docker-compose.yml -f docker-compose.host.yml up -d featurext
    - docker-compose exec featurext pytest -v
    - ./sbt "test-only * -- -n tags.FEIntegration"
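  # NOTE: in ScalaTest, -n includes and -l excludes tagged suites; the jobs
  # here use these flags to split one test suite into integration runs (above)
  # and unit runs (below).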
  - name: 'Unit tests: Scala (only DB)'
    before_install:
    - docker-compose -f docker-compose.yml -f docker-compose.host.yml up -d scylla
    - tar -xzf src/test/resources/weighted-minhash/csv.tar.gz -C src/test/resources/weighted-minhash/
    script:
    # run only the tests without external deps (besides the DB)
    - ./sbt "test-only * -- -l tags.FEIntegration -l tags.Bblfsh -l tags.FeatureExtractor"
  - name: 'Unit tests: Python for Report'
    before_install:
    - ./scripts/install_python_report.sh
    script:
    - pytest -v src/main/python/community-detector
  - name: 'Code: Scala style check'
    script: ./sbt scalastyle
  - name: 'Code: Python style check'
    before_install:
    - pip3 install yapf
    script:
    - make lint-python
after_failure:
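# dump database logs on any job failure (assuming the compose setup names the
# Scylla container "db")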
- docker logs db
before_cache:
# clean up files that change on every build and would otherwise invalidate the cache
- find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete
- find $HOME/.sbt -name "*.lock" -print -delete
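# Cached across builds: the downloaded Spark distribution (.spark-dist is
# presumably populated by scripts/get_apache_spark.sh) plus the sbt, Ivy,
# Coursier, and pip wheel caches.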
cache:
  directories:
  - .spark-dist
  - $HOME/.sbt
  - $HOME/.ivy2/cache
  - $HOME/.coursier
  - $HOME/.cache/pip/wheels