2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "v2xflexstack"
version = "0.10.9"
version = "0.10.10"
authors = [
{ name = "Jordi Marias-i-Parella", email = "jordi.marias@i2cat.net" },
{ name = "Daniel Ulied Guevara", email = "daniel.ulied@i2cat.net" },
2 changes: 1 addition & 1 deletion src/flexstack/facilities/ca_basic_service/cam_asn1.py
@@ -1,5 +1,5 @@
# pylint: skip-file
from flexstack.utils.asn1.etsi_its_cdd import ETSI_ITS_CDD_ASN1_DESCRIPTIONS
from ...utils.asn1.etsi_its_cdd import ETSI_ITS_CDD_ASN1_DESCRIPTIONS

CAM_ASN1_DESCRIPTIONS = (
ETSI_ITS_CDD_ASN1_DESCRIPTIONS
@@ -29,7 +29,12 @@ class CABasicServiceLDM:
Time that the messages stored in the LDM will be mantained. (In milliseconds).
"""

def __init__(self, ldm: LDMFacility, access_permissions: tuple[AccessPermission, ...], time_validity: int):
def __init__(
self,
ldm: LDMFacility,
access_permissions: tuple[AccessPermission, ...],
time_validity: int,
):
self.logging = logging.getLogger("ca_basic_service")
self.ldm_if_ldm_3 = ldm.if_ldm_3
self.access_permissions = access_permissions
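Note: the time_validity argument is documented above as the number of milliseconds a stored message is kept in the LDM. The snippet below is an illustration of that semantics only, not flexstack code; it assumes an entry carries the "utc_timestamp" key that reception_callback attaches further down in this diff.

```python
# Illustration of the time_validity semantics only; not the library's
# implementation. Assumes entries carry a "utc_timestamp" key in ms,
# as added by reception_callback elsewhere in this diff.
import time

time_validity = 1000  # ms, as passed to CABasicServiceLDM

def is_still_valid(entry: dict, now_ms: int | None = None) -> bool:
    """Return True while the entry is younger than time_validity ms."""
    if now_ms is None:
        now_ms = int(time.time() * 1000)
    return now_ms - entry["utc_timestamp"] <= time_validity

fresh = {"utc_timestamp": int(time.time() * 1000) - 500}
print(is_still_valid(fresh))  # True: received 500 ms ago
```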
@@ -73,7 +73,7 @@ def reception_callback(self, btp_indication: BTPDataIndication) -> None:
cam["utc_timestamp"] = utc_timestamp
if self.ca_basic_service_ldm is not None:
self.ca_basic_service_ldm.add_provider_data_to_ldm(cam)
self.logging.debug(
self.logging.info(
"Received CAM with timestamp: %s, station_id: %s",
cam["cam"]["generationDeltaTime"],
cam["header"]["stationId"],
@@ -3,6 +3,7 @@

This file implements the CAM Transmission Management required by the CAM Basic Service.
"""

from __future__ import annotations
from math import trunc
import logging
@@ -17,7 +18,7 @@
CommunicationProfile,
TrafficClass,
)
from ...utils.time_service import TimeService, ITS_EPOCH_MS, ELAPSED_MILLISECONDS
from ...utils.time_service import ITS_EPOCH_MS, ELAPSED_MILLISECONDS
from .cam_ldm_adaptation import CABasicServiceLDM

T_GEN_CAM_MIN = 100 # T_GenCamMin [in ms]
@@ -50,13 +51,16 @@ class VehicleData:
vehicle_width : int
Vehicle Width as specified in ETSI TS 102 894-2 V2.3.1 (2024-08).
"""

station_id: int = 0
station_type: int = 0
drive_direction: str = "unavailable"
vehicle_length: dict = field(default_factory=lambda: {
"vehicleLengthValue": 1023,
"vehicleLengthConfidenceIndication": "unavailable",
})
vehicle_length: dict = field(
default_factory=lambda: {
"vehicleLengthValue": 1023,
"vehicleLengthConfidenceIndication": "unavailable",
}
)
vehicle_width: int = 62

def __check_valid_station_id(self) -> None:
@@ -69,11 +73,13 @@ def __check_valid_station_type(self) -> None:

def __check_valid_drive_direction(self) -> None:
if self.drive_direction not in ["forward", "backward", "unavailable"]:
raise ValueError(
"Drive Direction must be forward, backward or unavailable")
raise ValueError("Drive Direction must be forward, backward or unavailable")

def __check_valid_vehicle_length(self) -> None:
if self.vehicle_length["vehicleLengthValue"] < 0 or self.vehicle_length["vehicleLengthValue"] > 1023:
if (
self.vehicle_length["vehicleLengthValue"] < 0
or self.vehicle_length["vehicleLengthValue"] > 1023
):
raise ValueError("Vehicle length must be between 0 and 1023")

def __check_valid_vehicle_width(self) -> None:
@@ -105,6 +111,7 @@ class GenerationDeltaTime:
msec : int
Time in milliseconds.
"""

msec: int = 0

@classmethod
@@ -138,12 +145,19 @@ def as_timestamp_in_certain_point(self, utc_timestamp_in_millis: int) -> float:
Timestamp of the generation delta time in milliseconds
"""
number_of_cycles = trunc(
(utc_timestamp_in_millis - ITS_EPOCH_MS + ELAPSED_MILLISECONDS) / 65536)
transformed_timestamp = self.msec + 65536 * \
number_of_cycles + ITS_EPOCH_MS - ELAPSED_MILLISECONDS
(utc_timestamp_in_millis - ITS_EPOCH_MS + ELAPSED_MILLISECONDS) / 65536
)
transformed_timestamp = (
self.msec + 65536 * number_of_cycles + ITS_EPOCH_MS - ELAPSED_MILLISECONDS
)
if transformed_timestamp <= utc_timestamp_in_millis:
return transformed_timestamp
return self.msec + 65536 * (number_of_cycles - 1) + ITS_EPOCH_MS - ELAPSED_MILLISECONDS
return (
self.msec
+ 65536 * (number_of_cycles - 1)
+ ITS_EPOCH_MS
- ELAPSED_MILLISECONDS
)

def __gt__(self, other: object) -> bool:
"""
@@ -208,8 +222,10 @@ class CooperativeAwarenessMessage:
All the CAM message in dict format as decoded by the CAMCoder.

"""

cam: dict = field(
default_factory=lambda: CooperativeAwarenessMessage.generate_white_cam_static())
default_factory=lambda: CooperativeAwarenessMessage.generate_white_cam_static()
)

@staticmethod
def generate_white_cam_static() -> dict:
@@ -307,7 +323,8 @@ def fullfill_gen_delta_time_with_tpv_data(self, tpv: dict) -> None:
"""
if "time" in tpv:
gen_delta_time = GenerationDeltaTime.from_timestamp(
parser.parse(tpv["time"]).timestamp())
parser.parse(tpv["time"]).timestamp()
)
self.cam["cam"]["generationDeltaTime"] = int(gen_delta_time.msec)

def fullfill_basic_container_with_tpv_data(self, tpv: dict) -> None:
@@ -362,7 +379,7 @@ def fullfill_high_frequency_container_with_tpv_data(self, tpv: dict) -> None:
if "track" in tpv.keys():
self.cam["cam"]["camParameters"]["highFrequencyContainer"][1]["heading"][
"headingValue"
] = int(tpv["track"]*10)
] = int(tpv["track"] * 10)
if "epd" in tpv.keys():
self.cam["cam"]["camParameters"]["highFrequencyContainer"][1]["heading"][
"headingConfidence"
@@ -622,11 +639,7 @@ def _send_cam(self, cam: CooperativeAwarenessMessage) -> None:
Send the next CAM.
"""
if self.ca_basic_service_ldm is not None:
cam_ldm = cam.cam.copy()
cam_ldm["utc_timestamp"] = TimeService.time()
self.ca_basic_service_ldm.add_provider_data_to_ldm(
cam.cam
)
self.ca_basic_service_ldm.add_provider_data_to_ldm(cam.cam)
data = self.cam_coder.encode(cam.cam)
request = BTPDataRequest(
btp_type=CommonNH.BTP_B,
@@ -639,7 +652,7 @@ def _send_cam(self, cam: CooperativeAwarenessMessage) -> None:
)

self.btp_router.btp_data_request(request)
self.logging.debug(
self.logging.info(
"Sent CAM message with timestamp: %d, station_id: %d",
cam.cam["cam"]["generationDeltaTime"],
cam.cam["header"]["stationId"],
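Note: the reformatted arithmetic in as_timestamp_in_certain_point recovers a full timestamp from the 16-bit generationDeltaTime, which wraps every 65 536 ms. Below is a self-contained sketch of the same round trip; the constant values are assumptions used only for illustration, the real ITS_EPOCH_MS and ELAPSED_MILLISECONDS are imported from flexstack.utils.time_service in the module above.

```python
# Self-contained sketch of the generationDeltaTime round trip (assumed
# constant values; the real ITS_EPOCH_MS / ELAPSED_MILLISECONDS come from
# flexstack.utils.time_service).
from math import trunc

ITS_EPOCH_MS = 1_072_915_200_000   # 2004-01-01T00:00:00Z in Unix ms (assumed)
ELAPSED_MILLISECONDS = 5_000       # assumed leap-second correction, in ms

def gen_delta_time(utc_ms: int) -> int:
    """16-bit generationDeltaTime: ITS timestamp modulo 65 536 ms."""
    return (utc_ms - ITS_EPOCH_MS + ELAPSED_MILLISECONDS) % 65_536

def recover_timestamp(msec: int, reference_utc_ms: int) -> int:
    """Invert gen_delta_time() given a reference UTC time, mirroring
    GenerationDeltaTime.as_timestamp_in_certain_point() above."""
    cycles = trunc((reference_utc_ms - ITS_EPOCH_MS + ELAPSED_MILLISECONDS) / 65_536)
    candidate = msec + 65_536 * cycles + ITS_EPOCH_MS - ELAPSED_MILLISECONDS
    if candidate <= reference_utc_ms:
        return candidate
    return candidate - 65_536  # value belongs to the previous 65 536 ms cycle

now_ms = 1_700_000_000_000
assert recover_timestamp(gen_delta_time(now_ms), now_ms) == now_ms
```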
@@ -100,7 +100,7 @@ def reception_callback(self, btp_indication: BTPDataIndication) -> None:
"""
denm = self.denm_coder.decode(btp_indication.data)
self.feed_ldm(denm)
self.logging.debug(
self.logging.info(
"Received DENM with timestamp: %s, station_id: %s",
denm["denm"]["management"]["referenceTime"],
denm["header"]["stationId"],
@@ -267,7 +267,7 @@ def transmit_denm(
length=len(data),
)
self.btp_router.btp_data_request(request)
self.logging.debug(
self.logging.info(
"Sent DENM with timestamp: %s, station_id: %s",
denm_to_send.denm["denm"]["management"]["referenceTime"],
denm_to_send.denm["header"]["stationId"],
28 changes: 20 additions & 8 deletions src/flexstack/facilities/local_dynamic_map/dictionary_database.py
@@ -50,19 +50,23 @@ def _create_query_search(self, query_with_attribute, operator, ref_value):
return OPERATOR_MAPPING[operator](query_with_attribute, ref_value)
raise ValueError(f"Invalid operator: {operator}")

def _filter_data(self, data_filter: Filter, database: list[dict]) -> tuple[dict, ...]:
def _filter_data(
self, data_filter: Filter, database: list[dict]
) -> tuple[dict, ...]:
list_of_data = []
if data_filter.filter_statement_2 is not None:
if str(data_filter.logical_operator) == "and":
for data in database:
if self._create_query_search(
self._get_nested(
data, str(data_filter.filter_statement_1.attribute)),
data, str(data_filter.filter_statement_1.attribute)
),
str(data_filter.filter_statement_1.operator),
data_filter.filter_statement_1.ref_value,
) & self._create_query_search(
self._get_nested(
data, str(data_filter.filter_statement_2.attribute)),
data, str(data_filter.filter_statement_2.attribute)
),
str(data_filter.filter_statement_2.operator),
data_filter.filter_statement_2.ref_value,
):
@@ -71,12 +75,14 @@ def _filter_data(self, data_filter: Filter, database: list[dict]) -> tuple[dict,
for data in database:
if self._create_query_search(
self._get_nested(
data, str(data_filter.filter_statement_1.attribute)),
data, str(data_filter.filter_statement_1.attribute)
),
str(data_filter.filter_statement_1.operator),
data_filter.filter_statement_1.ref_value,
) | self._create_query_search(
self._get_nested(
data, str(data_filter.filter_statement_2.attribute)),
data, str(data_filter.filter_statement_2.attribute)
),
str(data_filter.filter_statement_2.operator),
data_filter.filter_statement_2.ref_value,
):
@@ -85,7 +91,8 @@ def _filter_data(self, data_filter: Filter, database: list[dict]) -> tuple[dict,
for data in database:
if self._create_query_search(
self._get_nested(
data, str(data_filter.filter_statement_1.attribute)),
data, str(data_filter.filter_statement_1.attribute)
),
str(data_filter.filter_statement_1.operator),
data_filter.filter_statement_1.ref_value,
):
@@ -108,12 +115,17 @@ def search(self, data_request: RequestDataObjectsReq) -> tuple[dict, ...]:
"""
with self._lock:
if data_request.filter is None:
return RequestDataObjectsReq.filter_out_by_data_object_type(self.all(), data_request.data_object_type)
return tuple(
RequestDataObjectsReq.filter_out_by_data_object_type(
tuple(self.all()), data_request.data_object_type
)
)
try:
return self._filter_data(
data_request.filter,
RequestDataObjectsReq.filter_out_by_data_object_type(
tuple(self.all()), data_request.data_object_type),
tuple(self.all()), data_request.data_object_type
),
)
except KeyError as e:
print(f"[ListDatabase] KeyError searching data: {str(e)}")
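Note: the and/or branching that _filter_data implements above boils down to evaluating one or two attribute comparisons per stored object and combining them with the filter's logical operator. The sketch below condenses that logic into a standalone form; the Filter/FilterStatement dataclasses, the operator strings, and the dotted-path helper are assumptions for illustration and may not match flexstack's actual classes.

```python
# Condensed, standalone sketch of the filtering logic above. The dataclasses,
# operator strings and dotted-path helper are illustrative assumptions; only
# the overall and/or combination mirrors _filter_data().
import operator
from dataclasses import dataclass
from typing import Any, Optional

OPERATOR_MAPPING = {"==": operator.eq, "!=": operator.ne,
                    ">": operator.gt, "<": operator.lt}

@dataclass
class FilterStatement:
    attribute: str   # e.g. "header.stationId"
    operator: str
    ref_value: Any

@dataclass
class Filter:
    filter_statement_1: FilterStatement
    logical_operator: Optional[str] = None   # "and" or "or"
    filter_statement_2: Optional[FilterStatement] = None

def get_nested(data: dict, dotted_path: str) -> Any:
    """Resolve a dotted attribute path inside a nested dict."""
    for key in dotted_path.split("."):
        data = data[key]
    return data

def matches(stmt: FilterStatement, data: dict) -> bool:
    return OPERATOR_MAPPING[stmt.operator](get_nested(data, stmt.attribute), stmt.ref_value)

def filter_data(data_filter: Filter, database: list[dict]) -> tuple[dict, ...]:
    selected = []
    for data in database:
        hit = matches(data_filter.filter_statement_1, data)
        if data_filter.filter_statement_2 is not None:
            second = matches(data_filter.filter_statement_2, data)
            hit = (hit and second) if data_filter.logical_operator == "and" else (hit or second)
        if hit:
            selected.append(data)
    return tuple(selected)

cams = [{"header": {"stationId": 7}}, {"header": {"stationId": 9}}]
print(filter_data(Filter(FilterStatement("header.stationId", "==", 7)), cams))
# -> ({'header': {'stationId': 7}},)
```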