
Commit 03a9172

Merge branch 'master' into build/check-proto-build-setup
2 parents 807ac1a + ec82b7c commit 03a9172

20 files changed: +913 −426 lines

.github/workflows/protobuf.yml

Lines changed: 57 additions & 29 deletions
@@ -1,5 +1,10 @@
 name: ProtoBuf CI Builds
 
+env:
+  PROTOBUF_VERSION: 3.20.1
+  PROTOBUF_VARIANT: '-all' # Use '-all' prior to 22.0, '' after
+  ABSEIL_VERSION: 20230802.1
+
 on:
   push:
   pull_request:
@@ -13,7 +18,7 @@ jobs:
 
     steps:
       - name: Checkout OSI
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
          submodules: true
 
@@ -23,12 +28,17 @@ jobs:
           ( result=0 ; for f in *.proto ; do grep -q "'$f'" setup.py || { echo "Missing $f in setup.py" && let "result++"; } ; done ; exit $result )
 
       - name: Setup Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.7'
+          python-version: '3.8'
 
       - name: Install Python Dependencies
-        run: python -m pip install --upgrade pip setuptools wheel pyyaml
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install -r requirements_develop.txt
+
+      - name: Check black format
+        run: black --check --diff .
 
       - name: Install Doxygen
         run: sudo apt-get install doxygen graphviz
@@ -37,20 +47,29 @@ jobs:
         id: cache-depends
         uses: actions/cache@v3
         with:
-          path: protobuf-3.20.1
+          path: protobuf-${{ env.PROTOBUF_VERSION }}
           key: ${{ runner.os }}-v2-depends
 
-      - name: Download ProtoBuf
+      - name: Download ProtoBuf ${{ env.PROTOBUF_VERSION }}
         if: steps.cache-depends.outputs.cache-hit != 'true'
-        run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.20.1/protobuf-all-3.20.1.tar.gz && tar xzvf protobuf-all-3.20.1.tar.gz
+        run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${{env.PROTOBUF_VERSION}}/protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz && tar xzvf protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz
 
-      - name: Build ProtoBuf
-        if: steps.cache-depends.outputs.cache-hit != 'true'
-        working-directory: protobuf-3.20.1
+      - name: Download Abseil ${{ env.ABSEIL_VERSION }}
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
+        run: curl -OL https://github.com/abseil/abseil-cpp/archive/refs/tags/${{env.ABSEIL_VERSION}}.tar.gz && tar xzvf ${{env.ABSEIL_VERSION}}.tar.gz && rm -rf protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp && mv abseil-cpp-${{env.ABSEIL_VERSION}} protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp
+
+      - name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via autotools
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == '-all'
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
         run: ./configure DIST_LANG=cpp --prefix=/usr && make
 
-      - name: Install ProtoBuf
-        working-directory: protobuf-3.20.1
+      - name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via cmake
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
+        run: cmake -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_TESTS=OFF . && cmake --build . --config Release -j 4
+
+      - name: Install ProtoBuf ${{ env.PROTOBUF_VERSION }}
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
         run: sudo make install && sudo ldconfig
 
       - name: Install proto2cpp
@@ -62,17 +81,17 @@ jobs:
       # Versioning
       - name: Get versioning
         id: get_version
-        run: echo ::set-output name=VERSION::$(git describe --always)
+        run: echo "VERSION=$(git describe --always)" >> $GITHUB_OUTPUT
 
       - name: Prepare Documentation Build
         run: |
           sed -i 's/PROJECT_NUMBER\s*= @VERSION_MAJOR@.@VERSION_MINOR@.@VERSION_PATCH@/PROJECT_NUMBER = master (${{ steps.get_version.outputs.VERSION }})/g' doxygen_config.cmake.in
-          echo "EXCLUDE_PATTERNS = */osi3/* */protobuf-3.20.1/* */proto2cpp/* */flatbuffers/*" >> doxygen_config.cmake.in
+          echo "EXCLUDE_PATTERNS = */osi3/* */protobuf-*/* */proto2cpp/* */flatbuffers/*" >> doxygen_config.cmake.in
           echo "GENERATE_TREEVIEW = YES" >> doxygen_config.cmake.in
 
       - name: Configure C++ Build
         working-directory: build
-        run: cmake -D FILTER_PROTO2CPP_PY_PATH=$GITHUB_WORKSPACE/proto2cpp ..
+        run: cmake -D FILTER_PROTO2CPP_PY_PATH=$GITHUB_WORKSPACE/proto2cpp ${{ env.PROTOBUF_VARIANT =='' && '-DCMAKE_CXX_STANDARD=17' }} ..
 
       - name: Build C++
         working-directory: build
@@ -89,7 +108,7 @@ jobs:
 
       - name: Archive Documentation
         if: ${{ github.event_name == 'pull_request' }}
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
           name: linux64-doc
           path: doc/html
@@ -109,36 +128,45 @@ jobs:
 
     steps:
       - name: Checkout OSI
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
          submodules: true
 
       - name: Setup Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.7'
+          python-version: '3.8'
 
       - name: Install Python Dependencies
         run: python -m pip install --upgrade pip setuptools wheel pyyaml
 
       - name: Cache Dependencies
         id: cache-depends
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
-          path: protobuf-3.20.1
+          path: protobuf-${{ env.PROTOBUF_VERSION }}
           key: ${{ runner.os }}-v2-depends
 
-      - name: Download ProtoBuf
+      - name: Download ProtoBuf ${{ env.PROTOBUF_VERSION }}
         if: steps.cache-depends.outputs.cache-hit != 'true'
-        run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.20.1/protobuf-all-3.20.1.tar.gz && tar xzvf protobuf-all-3.20.1.tar.gz
+        run: curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${{env.PROTOBUF_VERSION}}/protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz && tar xzvf protobuf${{env.PROTOBUF_VARIANT}}-${{env.PROTOBUF_VERSION}}.tar.gz
 
-      - name: Build ProtoBuf
-        if: steps.cache-depends.outputs.cache-hit != 'true'
-        working-directory: protobuf-3.20.1
+      - name: Download Abseil ${{ env.ABSEIL_VERSION }}
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
+        run: curl -OL https://github.com/abseil/abseil-cpp/archive/refs/tags/${{env.ABSEIL_VERSION}}.tar.gz && tar xzvf ${{env.ABSEIL_VERSION}}.tar.gz && rm -rf protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp && mv abseil-cpp-${{env.ABSEIL_VERSION}} protobuf-${{env.PROTOBUF_VERSION}}/third_party/abseil-cpp
+
+      - name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via autotools
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == '-all'
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
         run: ./configure DIST_LANG=cpp --prefix=/usr && make
 
-      - name: Install ProtoBuf
-        working-directory: protobuf-3.20.1
+      - name: Build ProtoBuf ${{ env.PROTOBUF_VERSION }} via cmake
+        if: steps.cache-depends.outputs.cache-hit != 'true' && env.PROTOBUF_VARIANT == ''
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
+        run: cmake -DCMAKE_CXX_STANDARD=17 -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_TESTS=OFF . && cmake --build . --config Release -j 4
+
+      - name: Install ProtoBuf ${{ env.PROTOBUF_VERSION }}
+        working-directory: protobuf-${{ env.PROTOBUF_VERSION }}
         run: sudo make install && sudo ldconfig
 
       - name: Prepare C++ Build
@@ -151,7 +179,7 @@ jobs:
 
       - name: Configure C++ Build
         working-directory: build
-        run: cmake ..
+        run: cmake ${{ env.PROTOBUF_VARIANT =='' && '-DCMAKE_CXX_STANDARD=17' }} ..
 
       - name: Build C++
         working-directory: build
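A note on the "check setup.py" shell one-liner that appears as unchanged context in this workflow: it fails the job when a *.proto file in the repository root is not listed (single-quoted) in setup.py. Below is a rough Python equivalent of that check, added here only for illustration; it is not part of the repository and assumes it is run from the repository root.

# Hedged sketch: mirrors the workflow's shell check that every *.proto file
# is mentioned (in single quotes) somewhere in setup.py.
import sys
from pathlib import Path

setup_py = Path("setup.py").read_text()
missing = [p.name for p in sorted(Path(".").glob("*.proto")) if f"'{p.name}'" not in setup_py]

for name in missing:
    print(f"Missing {name} in setup.py")
sys.exit(len(missing))  # non-zero exit code fails the CI step, like the shell version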

.gitignore

Lines changed: 2 additions & 0 deletions
@@ -32,6 +32,8 @@ compile_commands.json
 
 # Python-generated files
 __pycache__/
+.venv/
+venv/
 *.py[cod]
 proto2cpp.log
 .clang-format

CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
 endif()
 
 # Set the C++ standard
-set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_CXX_STANDARD 11 CACHE STRING "C++ standard to be used")
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
 # Optional Flatbuffer support

format/OSITrace.py

Lines changed: 44 additions & 29 deletions
@@ -10,9 +10,10 @@
 from osi3.osi_groundtruth_pb2 import GroundTruth
 from osi3.osi_sensordata_pb2 import SensorData
 import warnings
-warnings.simplefilter('default')
 
-SEPARATOR = b'$$__$$'
+warnings.simplefilter("default")
+
+SEPARATOR = b"$$__$$"
 SEPARATOR_LENGTH = len(SEPARATOR)
 BUFFER_SIZE = 1000000
 
@@ -31,7 +32,7 @@ def get_size_from_file_stream(file_object):
 MESSAGES_TYPE = {
     "SensorView": SensorView,
     "GroundTruth": GroundTruth,
-    "SensorData": SensorData
+    "SensorData": SensorData,
 }
 
 
@@ -49,15 +50,15 @@ def __init__(self, path=None, type_name="SensorView"):
     def from_file(self, path, type_name="SensorView", max_index=-1, format_type=None):
         """Import a scenario from a file"""
 
-        if path.lower().endswith(('.lzma', '.xz')):
+        if path.lower().endswith((".lzma", ".xz")):
             self.scenario_file = lzma.open(path, "rb")
         else:
             self.scenario_file = open(path, "rb")
 
         self.type_name = type_name
         self.format_type = format_type
 
-        if self.format_type == 'separated':
+        if self.format_type == "separated":
             # warnings.warn("The separated trace files will be completely removed in the near future. Please convert them to *.osi files with the converter in the main OSI repository.", PendingDeprecationWarning)
             self.timestep_count = self.retrieve_message_offsets(max_index)
         else:
@@ -73,7 +74,7 @@ def retrieve_message_offsets(self, max_index):
         scenario_size = get_size_from_file_stream(self.scenario_file)
 
         if max_index == -1:
-            max_index = float('inf')
+            max_index = float("inf")
 
         buffer_deque = deque(maxlen=2)
 
@@ -100,7 +101,7 @@ def retrieve_message_offsets(self, max_index):
         self.scenario_file.seek(message_offset)
 
         while eof and found != -1:
-            buffer = buffer[found + SEPARATOR_LENGTH:]
+            buffer = buffer[found + SEPARATOR_LENGTH :]
             found = buffer.find(SEPARATOR)
 
         buffer_offset = scenario_size - len(buffer)
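For orientation, the two hunks above belong to retrieve_message_offsets, which handles the legacy "separated" trace format: serialized messages are laid out back to back with the literal byte marker $$__$$ between them, and the method scans the file in BUFFER_SIZE chunks looking for that marker. A minimal, non-buffered sketch of the same idea, using stand-in byte strings rather than real serialized messages:

# Sketch of the "separated" framing handled by retrieve_message_offsets.
# The real code scans a large file incrementally; this just splits an
# in-memory blob for illustration.
SEPARATOR = b"$$__$$"

def split_separated(blob):
    """Return the raw serialized messages contained in a 'separated' blob."""
    return blob.split(SEPARATOR)

blob = SEPARATOR.join([b"message-0", b"message-1", b"message-2"])  # stand-in payloads
assert split_separated(blob) == [b"message-0", b"message-1", b"message-2"]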
@@ -126,7 +127,7 @@ def retrieve_message(self):
         self.message_offsets = [0]
         eof = False
 
-        # TODO Implement buffering for the scenarios
+        # TODO Implement buffering for the scenarios
         self.scenario_file.seek(0)
         serialized_message = self.scenario_file.read()
         INT_LENGTH = len(struct.pack("<L", 0))
@@ -135,8 +136,12 @@
         i = 0
         while i < len(serialized_message):
             message = MESSAGES_TYPE[self.type_name]()
-            message_length = struct.unpack("<L", serialized_message[i:INT_LENGTH+i])[0]
-            message.ParseFromString(serialized_message[i+INT_LENGTH:i+INT_LENGTH+message_length])
+            message_length = struct.unpack(
+                "<L", serialized_message[i : INT_LENGTH + i]
+            )[0]
+            message.ParseFromString(
+                serialized_message[i + INT_LENGTH : i + INT_LENGTH + message_length]
+            )
             i += message_length + INT_LENGTH
             self.message_offsets.append(i)
 
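The loop reformatted above walks the single-file *.osi framing: each message is preceded by a little-endian uint32 length (struct format "<L"), with no separator bytes. A small round-trip sketch of that framing with dummy payloads; real traces carry serialized SensorView, GroundTruth, or SensorData messages instead:

# Hedged sketch of the length-prefixed *.osi framing parsed by retrieve_message:
# <4-byte little-endian length><payload>, repeated until end of file.
import struct

INT_LENGTH = len(struct.pack("<L", 0))  # 4 bytes

def frame(payloads):
    """Concatenate payloads, each with a "<L" length prefix."""
    return b"".join(struct.pack("<L", len(p)) + p for p in payloads)

def unframe(blob):
    """Walk the blob the same way retrieve_message does."""
    i, payloads = 0, []
    while i < len(blob):
        (length,) = struct.unpack("<L", blob[i : i + INT_LENGTH])
        payloads.append(blob[i + INT_LENGTH : i + INT_LENGTH + length])
        i += INT_LENGTH + length
    return payloads

blob = frame([b"alpha", b"beta"])  # dummy payloads, not real protobuf messages
assert unframe(blob) == [b"alpha", b"beta"]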
@@ -153,7 +158,7 @@ def get_message_by_index(self, index):
         Get a message by its index. Try first to get it from the cache made
         by the method ``cache_messages_in_index_range``.
         """
-        return next(self.get_messages_in_index_range(index, index+1))
+        return next(self.get_messages_in_index_range(index, index + 1))
 
     def get_messages(self):
         return self.get_messages_in_index_range(0, len(self.message_offsets))
@@ -164,26 +169,28 @@ def get_messages_in_index_range(self, begin, end):
         """
         self.scenario_file.seek(self.message_offsets[begin])
         abs_first_offset = self.message_offsets[begin]
-        abs_last_offset = self.message_offsets[end] \
-            if end < len(self.message_offsets) \
+        abs_last_offset = (
+            self.message_offsets[end]
+            if end < len(self.message_offsets)
             else self.retrieved_scenario_size
+        )
 
         rel_message_offsets = [
             abs_message_offset - abs_first_offset
             for abs_message_offset in self.message_offsets[begin:end]
         ]
 
         if self.format_type == "separated":
-            message_sequence_len = abs_last_offset - \
-                abs_first_offset - SEPARATOR_LENGTH
-            serialized_messages_extract = self.scenario_file.read(
-                message_sequence_len)
+            message_sequence_len = abs_last_offset - abs_first_offset - SEPARATOR_LENGTH
+            serialized_messages_extract = self.scenario_file.read(message_sequence_len)
 
             for rel_index, rel_message_offset in enumerate(rel_message_offsets):
                 rel_begin = rel_message_offset
-                rel_end = rel_message_offsets[rel_index + 1] - SEPARATOR_LENGTH \
-                    if rel_index + 1 < len(rel_message_offsets) \
+                rel_end = (
+                    rel_message_offsets[rel_index + 1] - SEPARATOR_LENGTH
+                    if rel_index + 1 < len(rel_message_offsets)
                     else message_sequence_len
+                )
                 message = MESSAGES_TYPE[self.type_name]()
                 serialized_message = serialized_messages_extract[rel_begin:rel_end]
                 message.ParseFromString(serialized_message)
@@ -212,27 +219,35 @@ def get_messages_in_index_range(self, begin, end):
 
     def make_readable(self, name, interval=None, index=None):
         self.scenario_file.seek(0)
-        serialized_message = self.scenario_file.read()
+        serialized_message = self.scenario_file.read()
         message_length = len(serialized_message)
 
         if message_length > 1000000000:
             # Throw a warning if trace file is bigger than 1GB
-            gb_size_input = round(message_length/1000000000, 2)
-            gb_size_output = round(3.307692308*message_length/1000000000, 2)
-            warnings.warn(f"The trace file you are trying to make readable has the size {gb_size_input}GB. This will generate a readable file with the size {gb_size_output}GB. Make sure you have enough disc space and memory to read the file with your text editor.", ResourceWarning)
-
-        with open(name, 'a') as f:
-
+            gb_size_input = round(message_length / 1000000000, 2)
+            gb_size_output = round(3.307692308 * message_length / 1000000000, 2)
+            warnings.warn(
+                f"The trace file you are trying to make readable has the size {gb_size_input}GB. This will generate a readable file with the size {gb_size_output}GB. Make sure you have enough disc space and memory to read the file with your text editor.",
+                ResourceWarning,
+            )
+
+        with open(name, "a") as f:
             if interval is None and index is None:
                 for i in self.get_messages():
                     f.write(str(i))
-
+
             if interval is not None and index is None:
-                if type(interval) == tuple and len(interval) == 2 and interval[0]<interval[1]:
+                if (
+                    type(interval) == tuple
+                    and len(interval) == 2
+                    and interval[0] < interval[1]
+                ):
                     for i in self.get_messages_in_index_range(interval[0], interval[1]):
                         f.write(str(i))
                 else:
-                    raise Exception("Argument 'interval' needs to be a tuple of length 2! The first number must be smaller then the second.")
+                    raise Exception(
+                        "Argument 'interval' needs to be a tuple of length 2! The first number must be smaller then the second."
+                    )
 
             if interval is None and index is not None:
                 if type(index) == int:
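To close the section, a hedged usage sketch of the trace reader touched by this file. The class name OSITrace, the import path, and the trace file name are assumptions for illustration and are not taken from this diff; only the method signatures (from_file, get_message_by_index, make_readable) appear in the hunks above.

# Hedged usage sketch; "sv_trace.osi" is a placeholder for an existing
# SensorView trace, and the import assumes the module is on the path.
from OSITrace import OSITrace  # assumed class/module name

trace = OSITrace()
trace.from_file(path="sv_trace.osi", type_name="SensorView")

first = trace.get_message_by_index(0)  # parsed protobuf message
print(first)

trace.make_readable("sv_trace.txt", interval=(0, 10))  # dump messages 0..9 as text
trace.scenario_file.close()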
