From 6ae5708623b9bb6a1f7ddf04e3f8185773592561 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Sat, 14 Mar 2026 13:15:24 -0300 Subject: [PATCH 01/44] BUG: Fix hard-coded radius value for parachute added mass calculation (#889) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix hard-coded radius value for parachute added mass calculation Calculate radius from cd_s using a typical hemispherical parachute drag coefficient (1.4) when radius is not explicitly provided. This fixes drift distance calculations for smaller parachutes like drogues. Formula: R = sqrt(cd_s / (Cd * π)) Closes #860 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Address code review: improve docstrings and add explicit None defaults Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Add CHANGELOG entry for PR #889 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Update rocket.add_parachute to use radius=None for consistency Changed the default radius from 1.5 to None in the add_parachute method to match the Parachute class behavior. This ensures consistent automatic radius calculation from cd_s across both APIs. Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Refactor Parachute class to remove hard-coded radius value and introduce drag_coefficient parameter for radius estimation Fix hard-coded radius value for parachute added mass calculation Calculate radius from cd_s using a typical hemispherical parachute drag coefficient (1.4) when radius is not explicitly provided. This fixes drift distance calculations for smaller parachutes like drogues. 
Formula: R = sqrt(cd_s / (Cd * π)) Closes #860 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Add CHANGELOG entry for PR #889 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Refactor Parachute class to remove hard-coded radius value and introduce drag_coefficient parameter for radius estimation MNT: Extract noise initialization to fix pylint too-many-statements in Parachute.__init__ Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> * Refactor environment method access in controller test for clarity * fix pylint * fix comments * avoid breaking change with drag_coefficient * refactors Parachute.__init__ method * fix tests --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Co-authored-by: Gui-FernandesBR --- CHANGELOG.md | 1 + rocketpy/rocket/parachute.py | 123 +++++++++++++++----- rocketpy/rocket/rocket.py | 32 +++-- rocketpy/stochastic/stochastic_parachute.py | 9 ++ tests/integration/simulation/test_flight.py | 87 +++++++------- tests/unit/rocket/test_parachute.py | 111 ++++++++++++++++++ 6 files changed, 277 insertions(+), 86 deletions(-) create mode 100644 tests/unit/rocket/test_parachute.py diff --git a/CHANGELOG.md b/CHANGELOG.md index cc5bc989e..e46ee3faa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,7 @@ Attention: The newest changes should be on top --> ### Fixed +- BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] - BUG: Fix CSV column header spacing in FlightDataExporter [#864](https://github.com/RocketPy-Team/RocketPy/issues/864) diff 
--git a/rocketpy/rocket/parachute.py b/rocketpy/rocket/parachute.py index 83b0ce0fd..4e0318d18 100644 --- a/rocketpy/rocket/parachute.py +++ b/rocketpy/rocket/parachute.py @@ -92,17 +92,25 @@ class Parachute: Function of noisy_pressure_signal. Parachute.clean_pressure_signal_function : Function Function of clean_pressure_signal. + Parachute.drag_coefficient : float + Drag coefficient of the inflated canopy shape, used only when + ``radius`` is not provided to estimate the parachute radius from + ``cd_s``: ``R = sqrt(cd_s / (drag_coefficient * pi))``. Typical + values: 1.4 for hemispherical canopies (default), 0.75 for flat + circular canopies, 1.5 for extended-skirt canopies. Parachute.radius : float Length of the non-unique semi-axis (radius) of the inflated hemispheroid - parachute in meters. - Parachute.height : float, None + parachute in meters. If not provided at construction time, it is + estimated from ``cd_s`` and ``drag_coefficient``. + Parachute.height : float Length of the unique semi-axis (height) of the inflated hemispheroid parachute in meters. Parachute.porosity : float - Geometric porosity of the canopy (ratio of open area to total canopy area), - in [0, 1]. Affects only the added-mass scaling during descent; it does - not change ``cd_s`` (drag). The default, 0.0432, yields an added-mass - of 1.0 (“neutral” behavior). + Geometric porosity of the canopy (ratio of open area to total canopy + area), in [0, 1]. Affects only the added-mass scaling during descent; + it does not change ``cd_s`` (drag). The default value of 0.0432 is + chosen so that the resulting ``added_mass_coefficient`` equals + approximately 1.0 ("neutral" added-mass behavior). Parachute.added_mass_coefficient : float Coefficient used to calculate the added-mass due to dragged air. It is calculated from the porosity of the parachute. 
@@ -116,9 +124,10 @@ def __init__( sampling_rate, lag=0, noise=(0, 0, 0), - radius=1.5, + radius=None, height=None, porosity=0.0432, + drag_coefficient=1.4, ): """Initializes Parachute class. @@ -172,25 +181,83 @@ def __init__( passed to the trigger function. Default value is ``(0, 0, 0)``. Units are in Pa. radius : float, optional - Length of the non-unique semi-axis (radius) of the inflated hemispheroid - parachute. Default value is 1.5. + Length of the non-unique semi-axis (radius) of the inflated + hemispheroid parachute. If not provided, it is estimated from + ``cd_s`` and ``drag_coefficient`` using: + ``radius = sqrt(cd_s / (drag_coefficient * pi))``. Units are in meters. height : float, optional Length of the unique semi-axis (height) of the inflated hemispheroid parachute. Default value is the radius of the parachute. Units are in meters. porosity : float, optional - Geometric porosity of the canopy (ratio of open area to total canopy area), - in [0, 1]. Affects only the added-mass scaling during descent; it does - not change ``cd_s`` (drag). The default, 0.0432, yields an added-mass - of 1.0 (“neutral” behavior). + Geometric porosity of the canopy (ratio of open area to total + canopy area), in [0, 1]. Affects only the added-mass scaling + during descent; it does not change ``cd_s`` (drag). The default + value of 0.0432 is chosen so that the resulting + ``added_mass_coefficient`` equals approximately 1.0 ("neutral" + added-mass behavior). + drag_coefficient : float, optional + Drag coefficient of the inflated canopy shape, used only when + ``radius`` is not provided. It relates the aerodynamic ``cd_s`` + to the physical canopy area via + ``cd_s = drag_coefficient * pi * radius**2``. Typical values: + + - **1.4** — hemispherical canopy (default, NASA SP-8066) + - **0.75** — flat circular canopy + - **1.5** — extended-skirt canopy + + Has no effect when ``radius`` is explicitly provided. 
""" + + # Save arguments as attributes self.name = name self.cd_s = cd_s self.trigger = trigger self.sampling_rate = sampling_rate self.lag = lag self.noise = noise + self.drag_coefficient = drag_coefficient + self.porosity = porosity + + # Initialize derived attributes + self.radius = self.__resolve_radius(radius, cd_s, drag_coefficient) + self.height = self.__resolve_height(height, self.radius) + self.added_mass_coefficient = self.__compute_added_mass_coefficient( + self.porosity + ) + self.__init_noise(noise) + self.__evaluate_trigger_function(trigger) + + # Prints and plots + self.prints = _ParachutePrints(self) + + def __resolve_radius(self, radius, cd_s, drag_coefficient): + """Resolves parachute radius from input or aerodynamic relation.""" + if radius is not None: + return radius + + # cd_s = Cd * S = Cd * pi * R^2 => R = sqrt(cd_s / (Cd * pi)) + return np.sqrt(cd_s / (drag_coefficient * np.pi)) + + def __resolve_height(self, height, radius): + """Resolves parachute height defaulting to radius when not provided.""" + return height or radius + + def __compute_added_mass_coefficient(self, porosity): + """Computes the added-mass coefficient from canopy porosity.""" + return 1.068 * ( + 1 - 1.465 * porosity - 0.25975 * porosity**2 + 1.2626 * porosity**3 + ) + + def __init_noise(self, noise): + """Initializes all noise-related attributes. + + Parameters + ---------- + noise : tuple, list + List in the format (mean, standard deviation, time-correlation). 
+ """ self.noise_signal = [[-1e-6, np.random.normal(noise[0], noise[1])]] self.noisy_pressure_signal = [] self.clean_pressure_signal = [] @@ -200,32 +267,19 @@ def __init__( self.clean_pressure_signal_function = Function(0) self.noisy_pressure_signal_function = Function(0) self.noise_signal_function = Function(0) - self.radius = radius - self.height = height or radius - self.porosity = porosity - self.added_mass_coefficient = 1.068 * ( - 1 - - 1.465 * self.porosity - - 0.25975 * self.porosity**2 - + 1.2626 * self.porosity**3 - ) - alpha, beta = self.noise_corr self.noise_function = lambda: ( alpha * self.noise_signal[-1][1] + beta * np.random.normal(noise[0], noise[1]) ) - self.prints = _ParachutePrints(self) - - self.__evaluate_trigger_function(trigger) - def __evaluate_trigger_function(self, trigger): """This is used to set the triggerfunc attribute that will be used to interact with the Flight class. """ # pylint: disable=unused-argument, function-redefined - # The parachute is deployed by a custom function + + # Case 1: The parachute is deployed by a custom function if callable(trigger): # work around for having added sensors to parachute triggers # to avoid breaking changes @@ -238,9 +292,10 @@ def triggerfunc(p, h, y, sensors): self.triggerfunc = triggerfunc + # Case 2: The parachute is deployed at a given height elif isinstance(trigger, (int, float)): # The parachute is deployed at a given height - def triggerfunc(p, h, y, sensors): # pylint: disable=unused-argument + def triggerfunc(p, h, y, sensors): # p = pressure considering parachute noise signal # h = height above ground level considering parachute noise signal # y = [x, y, z, vx, vy, vz, e0, e1, e2, e3, w1, w2, w3] @@ -248,9 +303,10 @@ def triggerfunc(p, h, y, sensors): # pylint: disable=unused-argument self.triggerfunc = triggerfunc + # Case 3: The parachute is deployed at apogee elif trigger.lower() == "apogee": # The parachute is deployed at apogee - def triggerfunc(p, h, y, sensors): # pylint: 
disable=unused-argument + def triggerfunc(p, h, y, sensors): # p = pressure considering parachute noise signal # h = height above ground level considering parachute noise signal # y = [x, y, z, vx, vy, vz, e0, e1, e2, e3, w1, w2, w3] @@ -258,6 +314,7 @@ def triggerfunc(p, h, y, sensors): # pylint: disable=unused-argument self.triggerfunc = triggerfunc + # Case 4: Invalid trigger input else: raise ValueError( f"Unable to set the trigger function for parachute '{self.name}'. " @@ -289,7 +346,7 @@ def info(self): def all_info(self): """Prints all information about the Parachute class.""" self.info() - # self.plots.all() # Parachutes still doesn't have plots + # self.plots.all() # TODO: Parachutes still doesn't have plots def to_dict(self, **kwargs): allow_pickle = kwargs.get("allow_pickle", True) @@ -309,6 +366,7 @@ def to_dict(self, **kwargs): "lag": self.lag, "noise": self.noise, "radius": self.radius, + "drag_coefficient": self.drag_coefficient, "height": self.height, "porosity": self.porosity, } @@ -341,7 +399,8 @@ def from_dict(cls, data): sampling_rate=data["sampling_rate"], lag=data["lag"], noise=data["noise"], - radius=data.get("radius", 1.5), + radius=data.get("radius", None), + drag_coefficient=data.get("drag_coefficient", 1.4), height=data.get("height", None), porosity=data.get("porosity", 0.0432), ) diff --git a/rocketpy/rocket/rocket.py b/rocketpy/rocket/rocket.py index 86fa981a9..51719753d 100644 --- a/rocketpy/rocket/rocket.py +++ b/rocketpy/rocket/rocket.py @@ -1502,9 +1502,10 @@ def add_parachute( sampling_rate=100, lag=0, noise=(0, 0, 0), - radius=1.5, + radius=None, height=None, porosity=0.0432, + drag_coefficient=1.4, ): """Creates a new parachute, storing its parameters such as opening delay, drag coefficients and trigger function. @@ -1564,26 +1565,34 @@ def add_parachute( passed to the trigger function. Default value is (0, 0, 0). Units are in pascal. 
radius : float, optional - Length of the non-unique semi-axis (radius) of the inflated hemispheroid - parachute. Default value is 1.5. + Length of the non-unique semi-axis (radius) of the inflated + hemispheroid parachute. If not provided, it is estimated from + `cd_s` and `drag_coefficient` using: + `radius = sqrt(cd_s / (drag_coefficient * pi))`. Units are in meters. height : float, optional Length of the unique semi-axis (height) of the inflated hemispheroid parachute. Default value is the radius of the parachute. Units are in meters. porosity : float, optional - Geometric porosity of the canopy (ratio of open area to total canopy area), - in [0, 1]. Affects only the added-mass scaling during descent; it does - not change ``cd_s`` (drag). The default, 0.0432, yields an added-mass - of 1.0 (“neutral” behavior). + Geometric porosity of the canopy (ratio of open area to total + canopy area), in [0, 1]. Affects only the added-mass scaling + during descent; it does not change `cd_s` (drag). The default + value of 0.0432 yields an `added_mass_coefficient` of + approximately 1.0 ("neutral" added-mass behavior). + drag_coefficient : float, optional + Drag coefficient of the inflated canopy shape, used only when + `radius` is not provided. Typical values: 1.4 for hemispherical + canopies (default), 0.75 for flat circular canopies, 1.5 for + extended-skirt canopies. Has no effect when `radius` is given. Returns ------- parachute : Parachute - Parachute containing trigger, sampling_rate, lag, cd_s, noise, radius, - height, porosity and name. Furthermore, it stores clean_pressure_signal, - noise_signal and noisyPressureSignal which are filled in during - Flight simulation. + Parachute containing trigger, sampling_rate, lag, cd_s, noise, + radius, drag_coefficient, height, porosity and name. Furthermore, + it stores clean_pressure_signal, noise_signal and + noisyPressureSignal which are filled in during Flight simulation. 
""" parachute = Parachute( name, @@ -1595,6 +1604,7 @@ def add_parachute( radius, height, porosity, + drag_coefficient, ) self.parachutes.append(parachute) return self.parachutes[-1] diff --git a/rocketpy/stochastic/stochastic_parachute.py b/rocketpy/stochastic/stochastic_parachute.py index dea8a077d..038907187 100644 --- a/rocketpy/stochastic/stochastic_parachute.py +++ b/rocketpy/stochastic/stochastic_parachute.py @@ -31,6 +31,9 @@ class StochasticParachute(StochasticModel): List with the name of the parachute object. This cannot be randomized. radius : tuple, list, int, float Radius of the parachute in meters. + drag_coefficient : tuple, list, int, float + Drag coefficient of the inflated canopy shape, used only when + ``radius`` is not provided. height : tuple, list, int, float Height of the parachute in meters. porosity : tuple, list, int, float @@ -46,6 +49,7 @@ def __init__( lag=None, noise=None, radius=None, + drag_coefficient=None, height=None, porosity=None, ): @@ -74,6 +78,9 @@ def __init__( time-correlation). radius : tuple, list, int, float Radius of the parachute in meters. + drag_coefficient : tuple, list, int, float + Drag coefficient of the inflated canopy shape, used only when + ``radius`` is not provided. height : tuple, list, int, float Height of the parachute in meters. 
porosity : tuple, list, int, float @@ -86,6 +93,7 @@ def __init__( self.lag = lag self.noise = noise self.radius = radius + self.drag_coefficient = drag_coefficient self.height = height self.porosity = porosity @@ -100,6 +108,7 @@ def __init__( noise=noise, name=None, radius=radius, + drag_coefficient=drag_coefficient, height=height, porosity=porosity, ) diff --git a/tests/integration/simulation/test_flight.py b/tests/integration/simulation/test_flight.py index 7e25a8927..66f0848a4 100644 --- a/tests/integration/simulation/test_flight.py +++ b/tests/integration/simulation/test_flight.py @@ -717,6 +717,48 @@ def invalid_controller_9_params( # pylint: disable=unused-argument ) +def make_controller_test_environment_access(methods_called): + def _call_env_methods(environment, altitude_asl): + _ = environment.elevation + methods_called["elevation"] = True + _ = environment.wind_velocity_x(altitude_asl) + methods_called["wind_velocity_x"] = True + _ = environment.wind_velocity_y(altitude_asl) + methods_called["wind_velocity_y"] = True + _ = environment.speed_of_sound(altitude_asl) + methods_called["speed_of_sound"] = True + _ = environment.pressure(altitude_asl) + methods_called["pressure"] = True + _ = environment.temperature(altitude_asl) + methods_called["temperature"] = True + + def controller( # pylint: disable=unused-argument + time, + sampling_rate, + state, + state_history, + observed_variables, + air_brakes, + sensors, + environment, + ): + """Controller that tests access to various environment methods.""" + altitude_asl = state[2] + + if time < 3.9: + return None + + try: + _call_env_methods(environment, altitude_asl) + air_brakes.deployment_level = 0.3 + except AttributeError as e: + raise AssertionError(f"Environment method not accessible: {e}") from e + + return (time, air_brakes.deployment_level) + + return controller + + def test_environment_methods_accessible_in_controller( calisto_robust, example_plain_env ): @@ -742,54 +784,13 @@ def 
test_environment_methods_accessible_in_controller( "temperature": False, } - def controller_test_environment_access( # pylint: disable=unused-argument - time, - sampling_rate, - state, - state_history, - observed_variables, - air_brakes, - sensors, - environment, - ): - """Controller that tests access to various environment methods.""" - altitude_asl = state[2] - - if time < 3.9: - return None - - # Test accessing various environment methods - try: - _ = environment.elevation - methods_called["elevation"] = True - - _ = environment.wind_velocity_x(altitude_asl) - methods_called["wind_velocity_x"] = True - - _ = environment.wind_velocity_y(altitude_asl) - methods_called["wind_velocity_y"] = True - - _ = environment.speed_of_sound(altitude_asl) - methods_called["speed_of_sound"] = True - - _ = environment.pressure(altitude_asl) - methods_called["pressure"] = True - - _ = environment.temperature(altitude_asl) - methods_called["temperature"] = True - - air_brakes.deployment_level = 0.3 - except AttributeError as e: - # If any method is not accessible, the test should fail - raise AssertionError(f"Environment method not accessible: {e}") from e - - return (time, air_brakes.deployment_level) + controller = make_controller_test_environment_access(methods_called) # Add air brakes with environment-testing controller calisto_robust.parachutes = [] calisto_robust.add_air_brakes( drag_coefficient_curve="data/rockets/calisto/air_brakes_cd.csv", - controller_function=controller_test_environment_access, + controller_function=controller, sampling_rate=10, clamp=True, ) diff --git a/tests/unit/rocket/test_parachute.py b/tests/unit/rocket/test_parachute.py new file mode 100644 index 000000000..e193b777b --- /dev/null +++ b/tests/unit/rocket/test_parachute.py @@ -0,0 +1,111 @@ +"""Unit tests for the Parachute class, focusing on the radius and +drag_coefficient parameters introduced in PR #889.""" + +import numpy as np +import pytest + +from rocketpy import Parachute + + +def 
_make_parachute(**kwargs): + defaults = { + "name": "test", + "cd_s": 10.0, + "trigger": "apogee", + "sampling_rate": 100, + } + defaults.update(kwargs) + return Parachute(**defaults) + + +class TestParachuteRadiusEstimation: + """Tests for auto-computed radius from cd_s and drag_coefficient.""" + + def test_radius_auto_computed_from_cd_s_default_drag_coefficient(self): + """When radius is not provided the radius is estimated using the + default drag_coefficient of 1.4 and the formula R = sqrt(cd_s / (Cd * pi)).""" + cd_s = 10.0 + parachute = _make_parachute(cd_s=cd_s) + expected_radius = np.sqrt(cd_s / (1.4 * np.pi)) + assert parachute.radius == pytest.approx(expected_radius, rel=1e-9) + + def test_radius_auto_computed_uses_custom_drag_coefficient(self): + """When drag_coefficient is provided and radius is not, the radius + must be estimated using the given drag_coefficient.""" + cd_s = 10.0 + custom_cd = 0.75 + parachute = _make_parachute(cd_s=cd_s, drag_coefficient=custom_cd) + expected_radius = np.sqrt(cd_s / (custom_cd * np.pi)) + assert parachute.radius == pytest.approx(expected_radius, rel=1e-9) + + def test_explicit_radius_overrides_estimation(self): + """When radius is explicitly provided, it must be used directly and + drag_coefficient must be ignored for the radius calculation.""" + explicit_radius = 2.5 + parachute = _make_parachute(radius=explicit_radius, drag_coefficient=0.5) + assert parachute.radius == explicit_radius + + def test_drag_coefficient_stored_on_instance(self): + """drag_coefficient must be stored as an attribute regardless of + whether radius is provided or not.""" + parachute = _make_parachute(drag_coefficient=0.75) + assert parachute.drag_coefficient == 0.75 + + def test_drag_coefficient_default_is_1_4(self): + """Default drag_coefficient must be 1.4 for backward compatibility.""" + parachute = _make_parachute() + assert parachute.drag_coefficient == pytest.approx(1.4) + + def test_drogue_radius_smaller_than_main(self): + """A drogue 
(cd_s=1.0) must have a smaller radius than a main (cd_s=10.0) + when using the same drag_coefficient.""" + main = _make_parachute(cd_s=10.0) + drogue = _make_parachute(cd_s=1.0) + assert drogue.radius < main.radius + + def test_drogue_radius_approximately_0_48(self): + """For cd_s=1.0 and drag_coefficient=1.4, the estimated radius + must be approximately 0.48 m (fixes the previous hard-coded 1.5 m).""" + drogue = _make_parachute(cd_s=1.0) + assert drogue.radius == pytest.approx(0.476, abs=1e-3) + + def test_main_radius_approximately_1_51(self): + """For cd_s=10.0 and drag_coefficient=1.4, the estimated radius + must be approximately 1.51 m, matching the old hard-coded value.""" + main = _make_parachute(cd_s=10.0) + assert main.radius == pytest.approx(1.508, abs=1e-3) + + +class TestParachuteSerialization: + """Tests for to_dict / from_dict round-trip including drag_coefficient.""" + + def test_to_dict_includes_drag_coefficient(self): + """to_dict must include the drag_coefficient key.""" + parachute = _make_parachute(drag_coefficient=0.75) + data = parachute.to_dict() + assert "drag_coefficient" in data + assert data["drag_coefficient"] == 0.75 + + def test_from_dict_round_trip_preserves_drag_coefficient(self): + """A Parachute serialized to dict and restored must have the same + drag_coefficient.""" + original = _make_parachute(cd_s=5.0, drag_coefficient=0.75) + data = original.to_dict() + restored = Parachute.from_dict(data) + assert restored.drag_coefficient == pytest.approx(0.75) + assert restored.radius == pytest.approx(original.radius, rel=1e-9) + + def test_from_dict_defaults_drag_coefficient_to_1_4_when_absent(self): + """Dicts serialized before drag_coefficient was added (no key) must + fall back to 1.4 for backward compatibility.""" + data = { + "name": "legacy", + "cd_s": 10.0, + "trigger": "apogee", + "sampling_rate": 100, + "lag": 0, + "noise": (0, 0, 0), + # no drag_coefficient key — simulates old serialized data + } + parachute = 
Parachute.from_dict(data) + assert parachute.drag_coefficient == pytest.approx(1.4) From ab6dc3df4fe52597ccceb42ffc4c363c2069e104 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Wed, 18 Mar 2026 22:28:37 -0300 Subject: [PATCH 02/44] ENH: get changes from BUG: All NOAA NOMADS Dependent Atmosphere Models Broken Fixes #933 --- CHANGELOG.md | 1 + .../environment/1-atm-models/ensemble.rst | 34 +- .../environment/1-atm-models/forecast.rst | 40 +- .../environment/1-atm-models/soundings.rst | 25 +- .../1-atm-models/standard_atmosphere.rst | 4 +- .../user/environment/3-further/other_apis.rst | 96 +++- rocketpy/environment/environment.py | 504 ++++++++++-------- rocketpy/environment/fetchers.py | 131 +++-- rocketpy/environment/tools.py | 216 ++++++-- rocketpy/environment/weather_model_mapping.py | 169 +++++- .../environment/test_environment.py | 23 +- tests/unit/environment/test_environment.py | 68 +++ 12 files changed, 919 insertions(+), 392 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c19f579f..d71dbb101 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -71,6 +71,7 @@ Attention: The newest changes should be on top --> ### Fixed +- BUG: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] diff --git a/docs/user/environment/1-atm-models/ensemble.rst b/docs/user/environment/1-atm-models/ensemble.rst index 97c247f68..504cbfe60 100644 --- a/docs/user/environment/1-atm-models/ensemble.rst +++ b/docs/user/environment/1-atm-models/ensemble.rst @@ -1,3 +1,5 @@ +.. 
_ensemble_atmosphere: + Ensemble ======== @@ -21,7 +23,21 @@ Ensemble Forecast Global Ensemble Forecast System (GEFS) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The ``GEFS`` model is a global ensemble forecast model ... +.. danger:: + + **GEFS shortcut unavailable**: ``file="GEFS"`` is currently disabled in + RocketPy because NOMADS OPeNDAP is deactivated for this endpoint. + +.. note:: + + If you have a GEFS-compatible NetCDF or OPeNDAP dataset from another + provider (or a local copy), you can still load it explicitly by passing the + dataset path/URL in ``file`` and a compatible mapping in ``dictionary``. + + +The ``GEFS`` model is a global ensemble forecast system useful for uncertainty +analysis, but RocketPy's automatic ``file="GEFS"`` shortcut is temporarily +disabled. .. code-block:: python @@ -71,20 +87,16 @@ CMC Ensemble resulted in a change of the model's endpoint. Efforts are underway to \ restore access to the CMC Ensemble model as swiftly as possible. -.. code-block:: python +At the moment, there is no built-in ``file="CMC"`` shortcut in +``Environment.set_atmospheric_model``. - env_cmc = Environment( - date=date_info, - latitude=-21.960641, - longitude=-47.482122, - elevation=640, - ) - env_cmc.set_atmospheric_model(type="Ensemble", file="CMC") - env_cmc.all_info() +If you have a CMC-compatible NetCDF or OPeNDAP dataset, load it explicitly by +passing the dataset path/URL in ``file`` and a matching mapping dictionary in +``dictionary``. Ensemble Reanalysis ------------------- Ensemble reanalyses are also possible with RocketPy. See the -:ref:`reanalysis_ensemble` section for more information. +:ref:`reanalysis_ensemble` section for more information. 
\ No newline at end of file diff --git a/docs/user/environment/1-atm-models/forecast.rst b/docs/user/environment/1-atm-models/forecast.rst index c88c71ff2..ac91504e0 100644 --- a/docs/user/environment/1-atm-models/forecast.rst +++ b/docs/user/environment/1-atm-models/forecast.rst @@ -24,7 +24,7 @@ Global Forecast System (GFS) Using the latest forecast from GFS is simple. Set the atmospheric model to ``forecast`` and specify that GFS is the file you want. -Note that since data is downloaded from the NOMADS server, this line of code can +Note that since data is downloaded from a remote OPeNDAP server, this line of code can take longer than usual. .. jupyter-execute:: @@ -111,36 +111,15 @@ The same coordinates for SpacePort America will be used. High Resolution Window (HIRESW) ------------------------------- -The High Resolution Window (HIRESW) model is a sophisticated weather forecasting -system that operates at a high spatial resolution of approximately 3 km. -It utilizes two main dynamical cores: the Advanced Research WRF (WRF-ARW) and -the Finite Volume Cubed Sphere (FV3), each designed to enhance the accuracy of -weather predictions. +.. danger:: -You can easily set up HIRESW in RocketPy by specifying the date, latitude, and -longitude of your location. Let's use SpacePort America as an example. + **HIRESW shortcut unavailable**: ``file="HIRESW"`` is currently disabled in + RocketPy because NOMADS OPeNDAP is deactivated for this endpoint. -.. jupyter-execute:: - - env_hiresw = Environment( - date=tomorrow, - latitude=32.988528, - longitude=-106.975056, - ) +If you have a HIRESW-compatible dataset from another provider (or a local copy), +you can still load it explicitly by passing the path/URL in ``file`` and an +appropriate mapping in ``dictionary``. - env_hiresw.set_atmospheric_model( - type="Forecast", - file="HIRESW", - dictionary="HIRESW", - ) - - env_hiresw.plots.atmospheric_model() - -.. 
note:: - - The HRES model is updated every 12 hours, providing forecasts with a \ - resolution of 3 km. The model can predict weather conditions up to 48 hours \ - in advance. RocketPy uses the CONUS domain with ARW core. Using Windy Atmosphere @@ -248,6 +227,5 @@ Also, the servers may be down or may face high traffic. .. seealso:: - To see a complete list of available models on the NOAA's NOMADS server, visit - `NOMADS `_. - + To browse available NCEP model collections on UCAR THREDDS, visit + `THREDDS NCEP Catalog `_. \ No newline at end of file diff --git a/docs/user/environment/1-atm-models/soundings.rst b/docs/user/environment/1-atm-models/soundings.rst index 9a276477e..279750df5 100644 --- a/docs/user/environment/1-atm-models/soundings.rst +++ b/docs/user/environment/1-atm-models/soundings.rst @@ -57,31 +57,22 @@ This service allows users to download virtual soundings from numerical weather prediction models such as GFS, RAP, and NAM, and also real soundings from the Integrated Global Radiosonde Archive (IGRA). -These options can be retrieved as a text file in GSD format. -By generating such a file through the link above, the file's URL can be used to -import the atmospheric data into RocketPy. - -We will use the same sounding station as we did for the Wyoming Soundings. +These options can be retrieved as a text file in GSD format. However, +RocketPy no longer provides a dedicated ``set_atmospheric_model`` type for +NOAA RUC Soundings. .. note:: Select ROABs as the initial data source, specify the station through its \ WMO-ID, and opt for the ASCII (GSD format) button. -Initialize a new Environment instance: - -.. 
code-block:: python +If you need to use RUC-sounding-like data in RocketPy, convert it to one of the +supported workflows: - url = r"https://rucsoundings.noaa.gov/get_raobs.cgi?data_source=RAOB&latest=latest&start_year=2019&start_month_name=Feb&start_mday=5&start_hour=12&start_min=0&n_hrs=1.0&fcst_len=shortest&airport=83779&text=Ascii%20text%20%28GSD%20format%29&hydrometeors=false&start=latest" - - env = Environment() - env.set_atmospheric_model(type="NOAARucSounding", file=url) - env.plots.atmospheric_model() +- Use :ref:`custom_atmosphere` after parsing the text data. +- Use :ref:`reanalysis` or :ref:`forecast` with NetCDF/OPeNDAP sources. .. note:: The leading `r` in the URL string is used to indicate a raw string, which \ - is useful when dealing with backslashes in URLs. - - - + is useful when dealing with backslashes in URLs. \ No newline at end of file diff --git a/docs/user/environment/1-atm-models/standard_atmosphere.rst b/docs/user/environment/1-atm-models/standard_atmosphere.rst index 0c125dfd8..d6c1de782 100644 --- a/docs/user/environment/1-atm-models/standard_atmosphere.rst +++ b/docs/user/environment/1-atm-models/standard_atmosphere.rst @@ -1,3 +1,5 @@ +.. _standard_atmosphere: + Standard Atmosphere =================== @@ -29,4 +31,4 @@ The International Standard Atmosphere can also be reset at any time by using the .. jupyter-execute:: - env.set_atmospheric_model(type="standard_atmosphere") + env.set_atmospheric_model(type="standard_atmosphere") \ No newline at end of file diff --git a/docs/user/environment/3-further/other_apis.rst b/docs/user/environment/3-further/other_apis.rst index c70fd58f7..01d4b9a30 100644 --- a/docs/user/environment/3-further/other_apis.rst +++ b/docs/user/environment/3-further/other_apis.rst @@ -1,3 +1,5 @@ +.. 
_environment_other_apis: + Connecting to other APIs ======================== @@ -25,14 +27,19 @@ the following dimensions and variables: - Latitude - Longitude - Pressure Levels +- Temperature (as a function of Time, Pressure Levels, Latitude and Longitude) - Geopotential Height (as a function of Time, Pressure Levels, Latitude and Longitude) +- or Geopotential (as a function of Time, Pressure Levels, Latitude and Longitude) - Surface Geopotential Height (as a function of Time, Latitude and Longitude) + (optional) - Wind - U Component (as a function of Time, Pressure Levels, Latitude and Longitude) - Wind - V Component (as a function of Time, Pressure Levels, Latitude and Longitude) +Some projected grids also require a ``projection`` key in the mapping. + -For example, let's imagine we want to use the HIRESW model from this endpoint: -`https://nomads.ncep.noaa.gov/dods/hiresw/ `_ +For example, let's imagine we want to use a forecast model available via an +OPeNDAP endpoint. Looking through the variable list in the link above, we find the following correspondence: @@ -72,15 +79,85 @@ Therefore, we can create an environment like this: dictionary=name_mapping, ) +Built-in mapping dictionaries +----------------------------- + +Instead of a custom dictionary, you can pass a built-in mapping name in the +``dictionary`` argument. Common options include: + +- ``"ECMWF"`` +- ``"ECMWF_v0"`` +- ``"NOAA"`` +- ``"GFS"`` +- ``"NAM"`` +- ``"RAP"`` +- ``"HIRESW"`` (mapping available; latest-model shortcut currently disabled) +- ``"GEFS"`` (mapping available; latest-model shortcut currently disabled) +- ``"MERRA2"`` +- ``"CMC"`` (for compatible datasets loaded explicitly) + +What a mapping name means +^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Base mapping names (for example ``"GFS"``, ``"NAM"`` and ``"RAP"``) map + RocketPy weather keys to the current default variable naming used by the + corresponding provider datasets. 
+- These defaults are aligned with current shortcut workflows (for example, + THREDDS-backed latest model sources) and may use projected coordinates + (``x``/``y`` plus ``projection``) depending on the model. + +Legacy mapping names +^^^^^^^^^^^^^^^^^^^^ + +If you are loading archived or older NOMADS-style datasets, use the explicit +legacy aliases: + +- ``"GFS_LEGACY"`` +- ``"NAM_LEGACY"`` +- ``"NOAA_LEGACY"`` +- ``"RAP_LEGACY"`` +- ``"CMC_LEGACY"`` +- ``"GEFS_LEGACY"`` +- ``"HIRESW_LEGACY"`` +- ``"MERRA2_LEGACY"`` + +Legacy aliases primarily cover older variable naming patterns such as +``lev``, ``tmpprs``, ``hgtprs``, ``ugrdprs`` and ``vgrdprs``. + +.. note:: + + Mapping names are case-insensitive. For example, + ``"gfs_legacy"`` and ``"GFS_LEGACY"`` are equivalent. + +For custom dictionaries, the canonical structure is: + +.. code-block:: python + + mapping = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "tmpprs", + "surface_geopotential_height": "hgtsfc", # optional + "geopotential_height": "hgtprs", # or geopotential + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } + +.. important:: + + Ensemble datasets require an additional key for member selection: + ``"ensemble": ""``. + .. caution:: - Notice the ``file`` argument were suppressed in the code above. This is because \ - the URL depends on the date you are running the simulation. For example, as \ - it for now, a possible link could be: https://nomads.ncep.noaa.gov/dods/hiresw/hiresw20240803/hiresw_conusfv3_12z \ - (for the 3rd of August, 2024, at 12:00 UTC). \ - You should replace the date in the URL with the date you are running the simulation. \ - Different models may have different URL structures, so be sure to check the \ - documentation of the model you are using. + The ``file`` argument was intentionally omitted in the example above. 
This is + because the URL depends on the provider, dataset, and date you are running + the simulation. Build the endpoint according to the provider specification + and always validate that the target service is active before running your + simulation workflow. Without OPeNDAP protocol @@ -94,4 +171,3 @@ Environment class, for example: - `Meteomatics `_: `#545 `_ - `Open-Meteo `_: `#520 `_ - diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 6743b06ae..39441ecae 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -27,6 +27,7 @@ find_latitude_index, find_longitude_index, find_time_index, + geodesic_to_lambert_conformal, geodesic_to_utm, get_elevation_data_from_dataset, get_final_date_from_time_array, @@ -138,15 +139,15 @@ class Environment: Environment.atmospheric_model_type : string Describes the atmospheric model which is being used. Can only assume the following values: ``standard_atmosphere``, ``custom_atmosphere``, - ``wyoming_sounding``, ``Forecast``, ``Reanalysis``, - ``Ensemble``. + ``wyoming_sounding``, ``windy``, ``forecast``, ``reanalysis``, + ``ensemble``. Environment.atmospheric_model_file : string Address of the file used for the atmospheric model being used. Only - defined for ``wyoming_sounding``, ``Forecast``, - ``Reanalysis``, ``Ensemble`` + defined for ``wyoming_sounding``, ``windy``, ``forecast``, + ``reanalysis``, ``ensemble`` Environment.atmospheric_model_dict : dictionary Dictionary used to properly interpret ``netCDF`` and ``OPeNDAP`` files. - Only defined for ``Forecast``, ``Reanalysis``, ``Ensemble``. + Only defined for ``forecast``, ``reanalysis``, ``ensemble``. Environment.atmospheric_model_init_date : datetime Datetime object instance of first available date in ``netCDF`` and ``OPeNDAP`` files when using ``Forecast``, ``Reanalysis`` or @@ -295,21 +296,21 @@ def __init__( - :attr:`Environment.datetime_date`: UTC time of launch. 
- Must be given if a Forecast, Reanalysis - or Ensemble, will be set as an atmospheric model. + Must be given if a ``windy``, ``forecast``, ``reanalysis`` + or ``ensemble`` atmospheric model will be used. Default is None. See :meth:`Environment.set_date` for more information. latitude : float, optional Latitude in degrees (ranging from -90 to 90) of rocket - launch location. Must be given if a Forecast, Reanalysis - or Ensemble will be used as an atmospheric model or if + launch location. Must be given if a ``windy``, ``forecast``, + ``reanalysis`` or ``ensemble`` atmospheric model will be used or if Open-Elevation will be used to compute elevation. Positive values correspond to the North. Default value is 0, which corresponds to the equator. longitude : float, optional Longitude in degrees (ranging from -180 to 180) of rocket - launch location. Must be given if a Forecast, Reanalysis - or Ensemble will be used as an atmospheric model or if + launch location. Must be given if a ``windy``, ``forecast``, + ``reanalysis`` or ``ensemble`` atmospheric model will be used or if Open-Elevation will be used to compute elevation. Positive values correspond to the East. Default value is 0, which corresponds to the Greenwich Meridian. @@ -605,13 +606,81 @@ def __set_earth_rotation_vector(self): # Validators (used to verify an attribute is being set correctly.) 
+ @staticmethod + def __dictionary_matches_dataset(dictionary, dataset): + """Check whether a mapping dictionary is compatible with a dataset.""" + variables = dataset.variables + required_keys = ( + "time", + "latitude", + "longitude", + "level", + "temperature", + "u_wind", + "v_wind", + ) + + for key in required_keys: + variable_name = dictionary.get(key) + if variable_name is None or variable_name not in variables: + return False + + projection_name = dictionary.get("projection") + if projection_name is not None and projection_name not in variables: + return False + + geopotential_height_name = dictionary.get("geopotential_height") + geopotential_name = dictionary.get("geopotential") + has_geopotential_height = ( + geopotential_height_name is not None + and geopotential_height_name in variables + ) + has_geopotential = ( + geopotential_name is not None and geopotential_name in variables + ) + + return has_geopotential_height or has_geopotential + + def __resolve_dictionary_for_dataset(self, dictionary, dataset): + """Resolve a compatible mapping dictionary for the loaded dataset. + + If the provided mapping is incompatible with the dataset variables, + this method tries built-in mappings and falls back to the first + compatible one. + """ + if self.__dictionary_matches_dataset(dictionary, dataset): + return dictionary + + for model_name, candidate in self.__weather_model_map.all_dictionaries.items(): + if self.__dictionary_matches_dataset(candidate, dataset): + warnings.warn( + "Provided weather mapping does not match dataset variables. " + f"Falling back to built-in mapping '{model_name}'." + ) + return candidate + + return dictionary + def __validate_dictionary(self, file, dictionary): # removed CMC until it is fixed. 
- available_models = ["GFS", "NAM", "RAP", "HIRESW", "GEFS", "ERA5", "MERRA2"] + available_models = [ + "GFS", + "NAM", + "RAP", + "HIRESW", + "GEFS", + "ERA5", + "MERRA2", + ] if isinstance(dictionary, str): dictionary = self.__weather_model_map.get(dictionary) - elif file in available_models: - dictionary = self.__weather_model_map.get(file) + elif isinstance(file, str): + matching_model = next( + (model for model in available_models if model.lower() == file.lower()), + None, + ) + if matching_model is not None: + dictionary = self.__weather_model_map.get(matching_model) if not isinstance(dictionary, dict): raise TypeError( "Please specify a dictionary or choose a valid model from the " @@ -1045,171 +1114,41 @@ def set_atmospheric_model( # pylint: disable=too-many-statements wind_u=0, wind_v=0, ): - """Defines an atmospheric model for the Environment. Supported - functionality includes using data from the `International Standard - Atmosphere`, importing data from weather reanalysis, forecasts and - ensemble forecasts, importing data from upper air soundings and - inputting data as custom functions, arrays or csv files. + """Define the atmospheric model for this Environment. Parameters ---------- type : string - One of the following options: - - - ``standard_atmosphere``: sets pressure and temperature profiles - corresponding to the International Standard Atmosphere defined by - ISO 2533 and ranging from -2 km to 80 km of altitude above sea - level. Note that the wind profiles are set to zero when this type - is chosen. - - - ``wyoming_sounding``: sets pressure, temperature, wind-u - and wind-v profiles and surface elevation obtained from - an upper air sounding given by the file parameter through - an URL. This URL should point to a data webpage given by - selecting plot type as text: list, a station and a time at - `weather.uwyo`_. 
- An example of a valid link would be: - - http://weather.uwyo.edu/cgi-bin/sounding?region=samer&TYPE=TEXT%3ALIST&YEAR=2019&MONTH=02&FROM=0200&TO=0200&STNM=82599 - - .. _weather.uwyo: http://weather.uwyo.edu/upperair/sounding.html - - - ``windy_atmosphere``: sets pressure, temperature, wind-u and - wind-v profiles and surface elevation obtained from the Windy API. - See file argument to specify the model as either ``ECMWF``, - ``GFS`` or ``ICON``. - - - ``Forecast``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date - and location of the launch should already have been set through - the date and location parameters when initializing the - Environment. The ``netCDF`` and ``OPeNDAP`` datasets must contain - at least geopotential height or geopotential, temperature, wind-u - and wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. - - - ``Reanalysis``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date and - location of the launch should already have been set through the - date and location parameters when initializing the Environment. 
- The ``netCDF`` and ``OPeNDAP`` datasets must contain at least - geopotential height or geopotential, temperature, wind-u and - wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. - - - ``Ensemble``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date and - location of the launch should already have been set through the - date and location parameters when initializing the Environment. - The ``netCDF`` and ``OPeNDAP`` datasets must contain at least - geopotential height or geopotential, temperature, wind-u and - wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. By default the - first ensemble forecast is activated. - - .. seealso:: - - To activate other ensemble forecasts see - :meth:`rocketpy.Environment.select_ensemble_member`. 
- - - ``custom_atmosphere``: sets pressure, temperature, wind-u and - wind-v profiles given though the pressure, temperature, wind-u and - wind-v parameters of this method. If pressure or temperature is - not given, it will default to the `International Standard - Atmosphere`. If the wind components are not given, it will default - to 0. - - file : string, optional - String that must be given when type is either ``wyoming_sounding``, - ``Forecast``, ``Reanalysis``, ``Ensemble`` or ``Windy``. It - specifies the location of the data given, either through a local - file address or a URL. If type is ``Forecast``, this parameter can - also be either ``GFS``, ``FV3``, ``RAP`` or ``NAM`` for latest of - these forecasts. - - .. note:: - - Time reference for the Forecasts are: - - - ``GFS``: `Global` - 0.25deg resolution - Updates every 6 - hours, forecast for 81 points spaced by 3 hours - - ``RAP``: `Regional USA` - 0.19deg resolution - Updates hourly, - forecast for 40 points spaced hourly - - ``NAM``: `Regional CONUS Nest` - 5 km resolution - Updates - every 6 hours, forecast for 21 points spaced by 3 hours - - If type is ``Ensemble``, this parameter can also be ``GEFS`` - for the latest of this ensemble. - - .. note:: - - Time referece for the Ensembles are: - - - GEFS: Global, bias-corrected, 0.5deg resolution, 21 forecast - members, Updates every 6 hours, forecast for 65 points spaced - by 4 hours - - CMC (currently not available): Global, 0.5deg resolution, 21 \ - forecast members, Updates every 12 hours, forecast for 65 \ - points spaced by 4 hours - - If type is ``Windy``, this parameter can be either ``GFS``, - ``ECMWF``, ``ICON`` or ``ICONEU``. Default in this case is ``ECMWF``. - dictionary : dictionary, string, optional - Dictionary that must be given when type is either ``Forecast``, - ``Reanalysis`` or ``Ensemble``. It specifies the dictionary to be - used when reading ``netCDF`` and ``OPeNDAP`` files, allowing the - correct retrieval of data. 
Acceptable values include ``ECMWF``, - ``NOAA``, ``UCAR`` and ``MERRA2`` for default dictionaries which can generally - be used to read datasets from these institutes. Alternatively, a - dictionary structure can also be given, specifying the short names - used for time, latitude, longitude, pressure levels, temperature - profile, geopotential or geopotential height profile, wind-u and - wind-v profiles in the dataset given in the file parameter. - Additionally, ensemble dictionaries must have the ensemble as well. - An example is the following dictionary, used for ``NOAA``: - - .. code-block:: python - - dictionary = { - "time": "time", - "latitude": "lat", - "longitude": "lon", - "level": "lev", - "ensemble": "ens", - "temperature": "tmpprs", - "surface_geopotential_height": "hgtsfc", - "geopotential_height": "hgtprs", - "geopotential": None, - "u_wind": "ugrdprs", - "v_wind": "vgrdprs", - } + Atmospheric model selector (case-insensitive). Accepted values are + ``"standard_atmosphere"``, ``"wyoming_sounding"``, ``"windy"``, + ``"forecast"``, ``"reanalysis"``, ``"ensemble"`` and + ``"custom_atmosphere"``. + file : string | netCDF4.Dataset, optional + Data source or model shortcut. Meaning depends on ``type``: + + - ``"standard_atmosphere"`` and ``"custom_atmosphere"``: ignored. + - ``"wyoming_sounding"``: URL of the sounding text page. + - ``"windy"``: one of ``"ECMWF"``, ``"GFS"``, ``"ICON"`` or + ``"ICONEU"``. + - ``"forecast"``: local path, OPeNDAP URL, open + ``netCDF4.Dataset``, or one of ``"GFS"``, ``"NAM"`` or ``"RAP"`` + for the latest available forecast. + - ``"reanalysis"``: local path, OPeNDAP URL, or open + ``netCDF4.Dataset``. + - ``"ensemble"``: local path, OPeNDAP URL, open + ``netCDF4.Dataset``, or ``"GEFS"`` for the latest available + forecast. + dictionary : dict | str, optional + Variable-name mapping for ``"forecast"``, ``"reanalysis"`` and + ``"ensemble"``. 
It may be a custom dictionary or a built-in + mapping name (for example: ``"ECMWF"``, ``"ECMWF_v0"``, + ``"NOAA"``, ``"GFS"``, ``"NAM"``, ``"RAP"``, ``"HIRESW"``, + ``"GEFS"``, ``"MERRA2"`` or ``"CMC"``). + + If ``dictionary`` is omitted and ``file`` is one of RocketPy's + latest-model shortcuts, the matching built-in mapping is selected + automatically. For ensemble datasets, the mapping must include the + ensemble dimension key (typically ``"ensemble"``). pressure : float, string, array, callable, optional This defines the atmospheric pressure profile. @@ -1272,6 +1211,36 @@ def set_atmospheric_model( # pylint: disable=too-many-statements Returns ------- None + + Raises + ------ + ValueError + If ``type`` is unknown, if required launch date/time information is + missing for date-dependent models, if Windy model names are invalid, + or if required atmospheric variables cannot be read from the input + dataset. + TypeError + If ``dictionary`` is invalid for ``"forecast"``, ``"reanalysis"`` + or ``"ensemble"``. + KeyError + If a built-in mapping name passed in ``dictionary`` is unknown. + + See Also + -------- + :ref:`atmospheric_models` + Overview of all atmospheric-model workflows in the user guide. + :ref:`forecast` + Forecast and Windy usage details, including latest-model shortcuts. + :ref:`reanalysis` + Reanalysis and MERRA-2 examples. + :ref:`soundings` + Wyoming sounding workflow and RUC migration notes. + :ref:`custom_atmosphere` + Defining pressure, temperature and wind profiles directly. + :ref:`ensemble_atmosphere` + Ensemble forecasts and member-selection workflow. + :ref:`environment_other_apis` + Building custom mapping dictionaries for NetCDF/OPeNDAP APIs. 
""" # Save atmospheric model type self.atmospheric_model_type = type @@ -1287,6 +1256,36 @@ def set_atmospheric_model( # pylint: disable=too-many-statements case "windy": self.process_windy_atmosphere(file) case "forecast" | "reanalysis" | "ensemble": + if isinstance(file, str): + shortcut_map = self.__atm_type_file_to_function_map.get(type, {}) + matching_shortcut = next( + ( + shortcut + for shortcut in shortcut_map + if shortcut.lower() == file.lower() + ), + None, + ) + if matching_shortcut is not None: + file = matching_shortcut + + if isinstance(file, str): + file_upper = file.upper() + if type == "forecast" and file_upper == "HIRESW": + raise ValueError( + "The HIRESW latest-model shortcut is currently " + "unavailable because NOMADS OPeNDAP is deactivated. " + "Please use another forecast source or provide a " + "compatible dataset path/URL explicitly." + ) + if type == "ensemble" and file_upper == "GEFS": + raise ValueError( + "The GEFS latest-model shortcut is currently " + "unavailable because NOMADS OPeNDAP is deactivated. " + "Please use another ensemble source or provide a " + "compatible dataset path/URL explicitly." + ) + dictionary = self.__validate_dictionary(file, dictionary) try: fetch_function = self.__atm_type_file_to_function_map[type][file] @@ -1471,6 +1470,12 @@ def process_windy_atmosphere(self, model="ECMWF"): # pylint: disable=too-many-s ``ECMWF`` for the `ECMWF-HRES` model, ``GFS`` for the `GFS` model, ``ICON`` for the `ICON-Global` model or ``ICONEU`` for the `ICON-EU` model. + + Raises + ------ + ValueError + If ``model`` is not one of ``ECMWF``, ``GFS``, ``ICON`` or + ``ICONEU``. 
""" if model.lower() not in ["ecmwf", "gfs", "icon", "iconeu"]: @@ -1728,6 +1733,13 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- Returns ------- None + + Raises + ------ + ValueError + If launch date/time was not set before loading date-dependent data, + or if required geopotential/geopotential-height, temperature, + wind-u, or wind-v variables cannot be read from the dataset. """ # Check if date, lat and lon are known self.__validate_datetime() @@ -1735,20 +1747,34 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- # Read weather file if isinstance(file, str): data = netCDF4.Dataset(file) - if dictionary["time"] not in data.variables.keys(): - dictionary = self.__weather_model_map.get("ECMWF_v0") else: data = file + dictionary = self.__resolve_dictionary_for_dataset(dictionary, data) + # Get time, latitude and longitude data from file time_array = data.variables[dictionary["time"]] - lon_list = data.variables[dictionary["longitude"]][:].tolist() - lat_list = data.variables[dictionary["latitude"]][:].tolist() + lon_array = data.variables[dictionary["longitude"]] + lat_array = data.variables[dictionary["latitude"]] + + # Some THREDDS datasets use projected x/y coordinates. 
+ if dictionary.get("projection") is not None: + projection_variable = data.variables[dictionary["projection"]] + x_units = getattr(lon_array, "units", "m") + target_lon, target_lat = geodesic_to_lambert_conformal( + self.latitude, + self.longitude, + projection_variable, + x_units=x_units, + ) + else: + target_lon = self.longitude + target_lat = self.latitude # Find time, latitude and longitude indexes time_index = find_time_index(self.datetime_date, time_array) - lon, lon_index = find_longitude_index(self.longitude, lon_list) - _, lat_index = find_latitude_index(self.latitude, lat_list) + lon, lon_index = find_longitude_index(target_lon, lon_array) + _, lat_index = find_latitude_index(target_lat, lat_array) # Get pressure level data from file levels = get_pressure_levels_from_file(data, dictionary) @@ -1806,9 +1832,9 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- ) from e # Prepare for bilinear interpolation - x, y = self.latitude, lon - x1, y1 = lat_list[lat_index - 1], lon_list[lon_index - 1] - x2, y2 = lat_list[lat_index], lon_list[lon_index] + x, y = target_lat, lon + x1, y1 = float(lat_array[lat_index - 1]), float(lon_array[lon_index - 1]) + x2, y2 = float(lat_array[lat_index]), float(lon_array[lon_index]) # Determine properties in lat, lon height = bilinear_interpolation( @@ -1860,6 +1886,17 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- wind_vs[:, 1, 1], ) + # Some datasets expose different level counts between fields + # (e.g., temperature on isobaric1 and geopotential on isobaric). 
+ min_profile_length = min( + len(levels), len(height), len(temper), len(wind_u), len(wind_v) + ) + levels = levels[:min_profile_length] + height = height[:min_profile_length] + temper = temper[:min_profile_length] + wind_u = wind_u[:min_profile_length] + wind_v = wind_v[:min_profile_length] + # Determine wind speed, heading and direction wind_speed = calculate_wind_speed(wind_u, wind_v) wind_heading = calculate_wind_heading(wind_u, wind_v) @@ -1917,14 +1954,14 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- ) else: self.atmospheric_model_interval = 0 - self.atmospheric_model_init_lat = lat_list[0] - self.atmospheric_model_end_lat = lat_list[-1] - self.atmospheric_model_init_lon = lon_list[0] - self.atmospheric_model_end_lon = lon_list[-1] + self.atmospheric_model_init_lat = float(lat_array[0]) + self.atmospheric_model_end_lat = float(lat_array[len(lat_array) - 1]) + self.atmospheric_model_init_lon = float(lon_array[0]) + self.atmospheric_model_end_lon = float(lon_array[len(lon_array) - 1]) # Save debugging data - self.lat_array = lat_list - self.lon_array = lon_list + self.lat_array = [x1, x2] + self.lon_array = [y1, y2] self.lon_index = lon_index self.lat_index = lat_index self.geopotentials = geopotentials @@ -1932,7 +1969,10 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- self.wind_vs = wind_vs self.levels = levels self.temperatures = temperatures - self.time_array = time_array[:].tolist() + self.time_array = [ + float(time_array[0]), + float(time_array[time_array.shape[0] - 1]), + ] self.height = height # Close weather data @@ -1994,6 +2034,13 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals -------- See the :class:``rocketpy.environment.weather_model_mapping`` for some dictionary examples. 
+ + Raises + ------ + ValueError + If launch date/time was not set before loading date-dependent data, + or if required geopotential/geopotential-height, temperature, + wind-u, or wind-v variables cannot be read from the dataset. """ # Check if date, lat and lon are known self.__validate_datetime() @@ -2004,23 +2051,40 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals else: data = file + dictionary = self.__resolve_dictionary_for_dataset(dictionary, data) + # Get time, latitude and longitude data from file time_array = data.variables[dictionary["time"]] - lon_list = data.variables[dictionary["longitude"]][:].tolist() - lat_list = data.variables[dictionary["latitude"]][:].tolist() + lon_array = data.variables[dictionary["longitude"]] + lat_array = data.variables[dictionary["latitude"]] + + # Some THREDDS datasets use projected x/y coordinates. + # NOTE(review): mirrors the projected-grid branch in + # process_forecast_reanalysis; confirm it also holds for ensemble grids. + if dictionary.get("projection") is not None: + projection_variable = data.variables[dictionary["projection"]] + x_units = getattr(lon_array, "units", "m") + target_lon, target_lat = geodesic_to_lambert_conformal( + self.latitude, + self.longitude, + projection_variable, + x_units=x_units, + ) + else: + target_lon = self.longitude + target_lat = self.latitude # Find time, latitude and longitude indexes time_index = find_time_index(self.datetime_date, time_array) - lon, lon_index = find_longitude_index(self.longitude, lon_list) - _, lat_index = find_latitude_index(self.latitude, lat_list) + lon, lon_index = find_longitude_index(target_lon, lon_array) + _, lat_index = find_latitude_index(target_lat, lat_array) # Get ensemble data from file + has_ensemble_dimension = True try: num_members = len(data.variables[dictionary["ensemble"]][:]) - except KeyError as e: - raise ValueError( - "Unable to read ensemble data from file. Check file and dictionary.
- ) from e + except KeyError: + has_ensemble_dimension = False + num_members = 1 # Get pressure level data from file levels = get_pressure_levels_from_file(data, dictionary) @@ -2079,10 +2143,16 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals "Unable to read wind-v component. Check file and dictionary." ) from e + if not has_ensemble_dimension: + geopotentials = np.expand_dims(geopotentials, axis=0) + temperatures = np.expand_dims(temperatures, axis=0) + wind_us = np.expand_dims(wind_us, axis=0) + wind_vs = np.expand_dims(wind_vs, axis=0) + # Prepare for bilinear interpolation - x, y = self.latitude, lon - x1, y1 = lat_list[lat_index - 1], lon_list[lon_index - 1] - x2, y2 = lat_list[lat_index], lon_list[lon_index] + x, y = target_lat, lon + x1, y1 = float(lat_array[lat_index - 1]), float(lon_array[lon_index - 1]) + x2, y2 = float(lat_array[lat_index]), float(lon_array[lon_index]) # Determine properties in lat, lon height = bilinear_interpolation( @@ -2134,6 +2204,19 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals wind_vs[:, :, 1, 1], ) + min_profile_length = min( + len(levels), + height.shape[1], + temper.shape[1], + wind_u.shape[1], + wind_v.shape[1], + ) + levels = levels[:min_profile_length] + height = height[:, :min_profile_length] + temper = temper[:, :min_profile_length] + wind_u = wind_u[:, :min_profile_length] + wind_v = wind_v[:, :min_profile_length] + # Determine wind speed, heading and direction wind_speed = calculate_wind_speed(wind_u, wind_v) wind_heading = calculate_wind_heading(wind_u, wind_v) @@ -2166,14 +2249,14 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals self.atmospheric_model_init_date = get_initial_date_from_time_array(time_array) self.atmospheric_model_end_date = get_final_date_from_time_array(time_array) self.atmospheric_model_interval = get_interval_date_from_time_array(time_array) - self.atmospheric_model_init_lat = lat_list[0] - 
self.atmospheric_model_end_lat = lat_list[-1] - self.atmospheric_model_init_lon = lon_list[0] - self.atmospheric_model_end_lon = lon_list[-1] + self.atmospheric_model_init_lat = float(lat_array[0]) + self.atmospheric_model_end_lat = float(lat_array[len(lat_array) - 1]) + self.atmospheric_model_init_lon = float(lon_array[0]) + self.atmospheric_model_end_lon = float(lon_array[len(lon_array) - 1]) # Save debugging data - self.lat_array = lat_list - self.lon_array = lon_list + self.lat_array = [x1, x2] + self.lon_array = [y1, y2] self.lon_index = lon_index self.lat_index = lat_index self.geopotentials = geopotentials @@ -2181,7 +2264,10 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals self.wind_vs = wind_vs self.levels = levels self.temperatures = temperatures - self.time_array = time_array[:].tolist() + self.time_array = [ + float(time_array[0]), + float(time_array[time_array.shape[0] - 1]), + ] self.height = height # Close weather data diff --git a/rocketpy/environment/fetchers.py b/rocketpy/environment/fetchers.py index d5ac2a1df..589159f1c 100644 --- a/rocketpy/environment/fetchers.py +++ b/rocketpy/environment/fetchers.py @@ -113,33 +113,18 @@ def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for GFS. """ - time_attempt = datetime.now(tz=timezone.utc) + file_url = ( + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg/Best" + ) attempt_count = 0 - dataset = None - - # TODO: the code below is trying to determine the hour of the latest available - # forecast by trial and error. This is not the best way to do it. We should - # actually check the NOAA website for the latest forecast time. Refactor needed. 
while attempt_count < max_attempts: - time_attempt -= timedelta(hours=6) # GFS updates every 6 hours - file_url = ( - f"https://nomads.ncep.noaa.gov/dods/gfs_0p25/gfs" - f"{time_attempt.year:04d}{time_attempt.month:02d}" - f"{time_attempt.day:02d}/" - f"gfs_0p25_{6 * (time_attempt.hour // 6):02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. - dataset = netCDF4.Dataset(file_url) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError( - "Unable to load latest weather data for GFS through " + file_url - ) + raise RuntimeError("Unable to load latest weather data for GFS through " + file_url) def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): @@ -163,28 +148,16 @@ def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for NAM. """ - # Attempt to get latest forecast - time_attempt = datetime.now(tz=timezone.utc) + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/NAM/CONUS_12km/Best" attempt_count = 0 - dataset = None - while attempt_count < max_attempts: - time_attempt -= timedelta(hours=6) # NAM updates every 6 hours - file = ( - f"https://nomads.ncep.noaa.gov/dods/nam/nam{time_attempt.year:04d}" - f"{time_attempt.month:02d}{time_attempt.day:02d}/" - f"nam_conusnest_{6 * (time_attempt.hour // 6):02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. 
- dataset = netCDF4.Dataset(file) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError("Unable to load latest weather data for NAM through " + file) + raise RuntimeError("Unable to load latest weather data for NAM through " + file_url) def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): @@ -208,28 +181,88 @@ def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for RAP. """ - # Attempt to get latest forecast - time_attempt = datetime.now(tz=timezone.utc) + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/RAP/CONUS_13km/Best" attempt_count = 0 - dataset = None + while attempt_count < max_attempts: + try: + return netCDF4.Dataset(file_url) + except OSError: + attempt_count += 1 + time.sleep(base_delay**attempt_count) + + raise RuntimeError("Unable to load latest weather data for RAP through " + file_url) + +def fetch_hrrr_file_return_dataset(max_attempts=10, base_delay=2): + """Fetches the latest HRRR (High-Resolution Rapid Refresh) dataset from + the NOAA's GrADS data server using the OpenDAP protocol. + + Parameters + ---------- + max_attempts : int, optional + The maximum number of attempts to fetch the dataset. Default is 10. + base_delay : int, optional + The base delay in seconds between attempts. Default is 2. + + Returns + ------- + netCDF4.Dataset + The HRRR dataset. + + Raises + ------ + RuntimeError + If unable to load the latest weather data for HRRR. 
+ """ + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/HRRR/CONUS_2p5km/Best" + attempt_count = 0 while attempt_count < max_attempts: - time_attempt -= timedelta(hours=1) # RAP updates every hour - file = ( - f"https://nomads.ncep.noaa.gov/dods/rap/rap{time_attempt.year:04d}" - f"{time_attempt.month:02d}{time_attempt.day:02d}/" - f"rap_{time_attempt.hour:02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. - dataset = netCDF4.Dataset(file) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError("Unable to load latest weather data for RAP through " + file) + raise RuntimeError( + "Unable to load latest weather data for HRRR through " + file_url + ) + + +def fetch_aigfs_file_return_dataset(max_attempts=10, base_delay=2): + """Fetches the latest AIGFS (Artificial Intelligence GFS) dataset from + the NOAA's GrADS data server using the OpenDAP protocol. + + Parameters + ---------- + max_attempts : int, optional + The maximum number of attempts to fetch the dataset. Default is 10. + base_delay : int, optional + The base delay in seconds between attempts. Default is 2. + + Returns + ------- + netCDF4.Dataset + The AIGFS dataset. + + Raises + ------ + RuntimeError + If unable to load the latest weather data for AIGFS. 
+ """ + file_url = ( + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/AIGFS/Global_0p25deg/Best" + ) + attempt_count = 0 + while attempt_count < max_attempts: + try: + return netCDF4.Dataset(file_url) + except OSError: + attempt_count += 1 + time.sleep(base_delay**attempt_count) + + raise RuntimeError( + "Unable to load latest weather data for AIGFS through " + file_url + ) def fetch_hiresw_file_return_dataset(max_attempts=10, base_delay=2): diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 1239ee6b9..fb0179c9e 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -5,7 +5,7 @@ future to improve their performance and usability. """ -import bisect +import math import warnings import netCDF4 @@ -109,6 +109,63 @@ def calculate_wind_speed(u, v, w=0.0): return np.sqrt(u**2 + v**2 + w**2) +def geodesic_to_lambert_conformal(lat, lon, projection_variable, x_units="m"): + """Convert geodesic coordinates to Lambert conformal projected coordinates. + + Parameters + ---------- + lat : float + Latitude in degrees. + lon : float + Longitude in degrees, ranging from -180 to 180. + projection_variable : netCDF4.Variable + Projection variable containing Lambert conformal metadata. + x_units : str, optional + Units used by the dataset x coordinate. Supported values are meters + and kilometers. Default is "m". + + Returns + ------- + tuple[float, float] + Projected coordinates ``(x, y)`` in the same units as ``x_units``. 
+ """ + lat_radians = math.radians(lat) + lon_radians = math.radians(lon % 360) + + lat_origin = math.radians(float(projection_variable.latitude_of_projection_origin)) + lon_origin = math.radians(float(projection_variable.longitude_of_central_meridian)) + + standard_parallel = projection_variable.standard_parallel + if np.ndim(standard_parallel) == 0: + standard_parallels = [float(standard_parallel)] + else: + standard_parallels = np.asarray(standard_parallel, dtype=float).tolist() + + if len(standard_parallels) >= 2: + phi_1 = math.radians(standard_parallels[0]) + phi_2 = math.radians(standard_parallels[1]) + n = math.log(math.cos(phi_1) / math.cos(phi_2)) / math.log( + math.tan(math.pi / 4 + phi_2 / 2) / math.tan(math.pi / 4 + phi_1 / 2) + ) + else: + phi_1 = math.radians(standard_parallels[0]) + n = math.sin(phi_1) + + earth_radius = float(getattr(projection_variable, "earth_radius", 6371229.0)) + f_const = (math.cos(phi_1) * math.tan(math.pi / 4 + phi_1 / 2) ** n) / n + + rho = earth_radius * f_const / (math.tan(math.pi / 4 + lat_radians / 2) ** n) + rho_origin = earth_radius * f_const / (math.tan(math.pi / 4 + lat_origin / 2) ** n) + theta = n * (lon_radians - lon_origin) + + x_meters = rho * math.sin(theta) + y_meters = rho_origin - rho * math.cos(theta) + + if str(x_units).lower().startswith("km"): + return x_meters / 1000.0, y_meters / 1000.0 + return x_meters, y_meters + + ## These functions are meant to be used with netcdf4 datasets @@ -168,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): +def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements """Finds the index of the given longitude in a list of longitudes. Parameters @@ -188,30 +245,48 @@ def find_longitude_index(longitude, lon_list): ValueError If the longitude is not within the range covered by the list. 
""" - # Determine if file uses -180 to 180 or 0 to 360 - if lon_list[0] < 0 or lon_list[-1] < 0: - # Convert input to -180 - 180 - lon = longitude if longitude < 180 else -180 + longitude % 180 - else: - # Convert input to 0 - 360 - lon = longitude % 360 - # Check if reversed or sorted - if lon_list[0] < lon_list[-1]: - # Deal with sorted lon_list - lon_index = bisect.bisect(lon_list, lon) + + def _coord_value(source, index): + return float(source[index]) + + lon_len = len(lon_list) + lon_start = _coord_value(lon_list, 0) + lon_end = _coord_value(lon_list, lon_len - 1) + + # Determine if file uses geographic longitudes in [-180, 180] or [0, 360]. + # Do not remap projected x coordinates. + is_geographic_longitude = abs(lon_start) <= 360 and abs(lon_end) <= 360 + if is_geographic_longitude: + if lon_start < 0 or lon_end < 0: + lon = longitude if longitude < 180 else -180 + longitude % 180 + else: + lon = longitude % 360 else: - # Deal with reversed lon_list - lon_list.reverse() - lon_index = len(lon_list) - bisect.bisect_left(lon_list, lon) - lon_list.reverse() + lon = longitude + + is_ascending = lon_start < lon_end + + # Binary search to find the insertion index such that index-1 and index + # bracket the requested longitude. + low = 0 + high = lon_len + while low < high: + mid = (low + high) // 2 + mid_value = _coord_value(lon_list, mid) + if (mid_value < lon) if is_ascending else (mid_value > lon): + low = mid + 1 + else: + high = mid + lon_index = low + # Take care of longitude value equal to maximum longitude in the grid - if lon_index == len(lon_list) and lon_list[lon_index - 1] == lon: - lon_index = lon_index - 1 + if lon_index == lon_len and _coord_value(lon_list, lon_index - 1) == lon: + lon_index -= 1 # Check if longitude value is inside the grid - if lon_index == 0 or lon_index == len(lon_list): + if lon_index in (0, lon_len): raise ValueError( f"Longitude {lon} not inside region covered by file, which is " - f"from {lon_list[0]} to {lon_list[-1]}." 
+ f"from {lon_start} to {lon_end}." ) return lon, lon_index @@ -237,28 +312,39 @@ def find_latitude_index(latitude, lat_list): ValueError If the latitude is not within the range covered by the list. """ - # Check if reversed or sorted - if lat_list[0] < lat_list[-1]: - # Deal with sorted lat_list - lat_index = bisect.bisect(lat_list, latitude) - else: - # Deal with reversed lat_list - lat_list.reverse() - lat_index = len(lat_list) - bisect.bisect_left(lat_list, latitude) - lat_list.reverse() - # Take care of latitude value equal to maximum longitude in the grid - if lat_index == len(lat_list) and lat_list[lat_index - 1] == latitude: - lat_index = lat_index - 1 + + def _coord_value(source, index): + return float(source[index]) + + lat_len = len(lat_list) + lat_start = _coord_value(lat_list, 0) + lat_end = _coord_value(lat_list, lat_len - 1) + is_ascending = lat_start < lat_end + + low = 0 + high = lat_len + while low < high: + mid = (low + high) // 2 + mid_value = _coord_value(lat_list, mid) + if (mid_value < latitude) if is_ascending else (mid_value > latitude): + low = mid + 1 + else: + high = mid + lat_index = low + + # Take care of latitude value equal to maximum latitude in the grid + if lat_index == lat_len and _coord_value(lat_list, lat_index - 1) == latitude: + lat_index -= 1 # Check if latitude value is inside the grid - if lat_index == 0 or lat_index == len(lat_list): + if lat_index in (0, lat_len): raise ValueError( f"Latitude {latitude} not inside region covered by file, " - f"which is from {lat_list[0]} to {lat_list[-1]}." + f"which is from {lat_start} to {lat_end}." ) return latitude, lat_index -def find_time_index(datetime_date, time_array): +def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements """Finds the index of the given datetime in a netCDF4 time array. 
Parameters @@ -280,26 +366,58 @@ def find_time_index(datetime_date, time_array): ValueError If the exact datetime is not available and the nearest datetime is used instead. """ - time_index = netCDF4.date2index( - datetime_date, time_array, calendar="gregorian", select="nearest" - ) - # Convert times do dates and numbers - input_time_num = netCDF4.date2num( - datetime_date, time_array.units, calendar="gregorian" - ) - file_time_num = time_array[time_index] - file_time_date = netCDF4.num2date( - time_array[time_index], time_array.units, calendar="gregorian" - ) + time_len = len(time_array) + time_units = time_array.units + input_time_num = netCDF4.date2num(datetime_date, time_units, calendar="gregorian") + + first_time_num = float(time_array[0]) + last_time_num = float(time_array[time_len - 1]) + is_ascending = first_time_num <= last_time_num + + # Binary search nearest index using scalar probing only. + low = 0 + high = time_len + while low < high: + mid = (low + high) // 2 + mid_time_num = float(time_array[mid]) + if ( + (mid_time_num < input_time_num) + if is_ascending + else (mid_time_num > input_time_num) + ): + low = mid + 1 + else: + high = mid + + right_index = min(max(low, 0), time_len - 1) + left_index = min(max(right_index - 1, 0), time_len - 1) + + right_time_num = float(time_array[right_index]) + left_time_num = float(time_array[left_index]) + if abs(input_time_num - left_time_num) <= abs(right_time_num - input_time_num): + time_index = left_index + file_time_num = left_time_num + else: + time_index = right_index + file_time_num = right_time_num + + file_time_date = netCDF4.num2date(file_time_num, time_units, calendar="gregorian") + # Check if time is inside range supplied by file - if time_index == 0 and input_time_num < file_time_num: + if time_index == 0 and ( + (is_ascending and input_time_num < file_time_num) + or (not is_ascending and input_time_num > file_time_num) + ): raise ValueError( f"The chosen launch time 
'{datetime_date.strftime('%Y-%m-%d-%H:')} UTC' is" " not available in the provided file. Please choose a time within the range" " of the file, which starts at " f"'{file_time_date.strftime('%Y-%m-%d-%H')} UTC'." ) - elif time_index == len(time_array) - 1 and input_time_num > file_time_num: + elif time_index == time_len - 1 and ( + (is_ascending and input_time_num > file_time_num) + or (not is_ascending and input_time_num < file_time_num) + ): raise ValueError( "Chosen launch time is not available in the provided file, " f"which ends at {file_time_date}." diff --git a/rocketpy/environment/weather_model_mapping.py b/rocketpy/environment/weather_model_mapping.py index 75089f577..c490fad9d 100644 --- a/rocketpy/environment/weather_model_mapping.py +++ b/rocketpy/environment/weather_model_mapping.py @@ -1,9 +1,42 @@ class WeatherModelMapping: - """Class to map the weather model variables to the variables used in the - Environment class. + """Map provider-specific variable names to RocketPy weather fields. + + RocketPy reads forecast/reanalysis/ensemble datasets using canonical keys + such as ``time``, ``latitude``, ``longitude``, ``level``, ``temperature``, + ``geopotential_height``, ``geopotential``, ``u_wind`` and ``v_wind``. + Each dictionary in this class maps those canonical keys to the actual + variable names in a specific data provider format. + + Mapping families + ---------------- + - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current + default mappings used by the latest-model shortcuts and THREDDS-style + datasets. + - ``*_LEGACY`` names represent older NOMADS-style variable naming + conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and + ``vgrdprs``) and are intended for archived or previously downloaded files. + + Notes + ----- + - Mappings can also include optional keys such as ``projection`` for + projected grids and ``ensemble`` for member dimensions. 
+ - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and + ``"GFS_LEGACY"`` are equivalent. """ GFS = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": "Geopotential_height_surface", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + GFS_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -16,6 +49,19 @@ class WeatherModelMapping: "v_wind": "vgrdprs", } NAM = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": None, + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + NAM_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -54,6 +100,18 @@ class WeatherModelMapping: "v_wind": "v", } NOAA = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": "Geopotential_height_surface", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + NOAA_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -66,6 +124,19 @@ class WeatherModelMapping: "v_wind": "vgrdprs", } RAP = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": None, + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", 
+ "v_wind": "v-component_of_wind_isobaric", + } + RAP_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -90,6 +161,19 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + CMC_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "ensemble": "ens", + "temperature": "tmpprs", + "surface_geopotential_height": None, + "geopotential_height": "hgtprs", + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } GEFS = { "time": "time", "latitude": "lat", @@ -103,6 +187,19 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + GEFS_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "ensemble": "ens", + "temperature": "tmpprs", + "surface_geopotential_height": None, + "geopotential_height": "hgtprs", + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } HIRESW = { "time": "time", "latitude": "lat", @@ -114,6 +211,17 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + HIRESW_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "tmpprs", + "surface_geopotential_height": "hgtsfc", + "geopotential_height": "hgtprs", + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } MERRA2 = { "time": "time", "latitude": "lat", @@ -127,29 +235,78 @@ class WeatherModelMapping: "u_wind": "U", "v_wind": "V", } + MERRA2_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "T", + "surface_geopotential_height": None, + "surface_geopotential": "PHIS", + "geopotential_height": "H", + "geopotential": None, + "u_wind": "U", + "v_wind": "V", + } def __init__(self): - """Initialize the class, creates a dictionary with all the weather models - available and their respective dictionaries with the variables.""" + """Build the lookup table with default and legacy mapping aliases.""" self.all_dictionaries = { 
"GFS": self.GFS, + "GFS_LEGACY": self.GFS_LEGACY, "NAM": self.NAM, + "NAM_LEGACY": self.NAM_LEGACY, "ECMWF_v0": self.ECMWF_v0, "ECMWF": self.ECMWF, "NOAA": self.NOAA, + "NOAA_LEGACY": self.NOAA_LEGACY, "RAP": self.RAP, + "RAP_LEGACY": self.RAP_LEGACY, "CMC": self.CMC, + "CMC_LEGACY": self.CMC_LEGACY, "GEFS": self.GEFS, + "GEFS_LEGACY": self.GEFS_LEGACY, "HIRESW": self.HIRESW, + "HIRESW_LEGACY": self.HIRESW_LEGACY, "MERRA2": self.MERRA2, + "MERRA2_LEGACY": self.MERRA2_LEGACY, } def get(self, model): + """Return a mapping dictionary by model alias (case-insensitive). + + Parameters + ---------- + model : str + Mapping alias name, such as ``"GFS"`` or ``"GFS_LEGACY"``. + + Returns + ------- + dict + Dictionary mapping RocketPy canonical weather keys to dataset + variable names. + + Raises + ------ + KeyError + If ``model`` is unknown or not a string. + """ + if not isinstance(model, str): + raise KeyError( + f"Model {model} not found in the WeatherModelMapping. " + f"The available models are: {self.all_dictionaries.keys()}" + ) + try: return self.all_dictionaries[model] - except KeyError as e: + except KeyError as exc: + model_casefold = model.casefold() + for key, value in self.all_dictionaries.items(): + if key.casefold() == model_casefold: + return value + raise KeyError( f"Model {model} not found in the WeatherModelMapping. " f"The available models are: {self.all_dictionaries.keys()}" - ) from e + ) from exc diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index 3bdd5209a..37078b8fd 100644 --- a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -178,8 +178,11 @@ def test_gefs_atmosphere(mock_show, example_spaceport_env): # pylint: disable=u example_spaceport_env : rocketpy.Environment Example environment object to be tested. 
""" - example_spaceport_env.set_atmospheric_model(type="Ensemble", file="GEFS") - assert example_spaceport_env.all_info() is None + with pytest.raises( + ValueError, + match="GEFS latest-model shortcut is currently unavailable", + ): + example_spaceport_env.set_atmospheric_model(type="Ensemble", file="GEFS") @pytest.mark.slow @@ -234,13 +237,15 @@ def test_hiresw_ensemble_atmosphere(mock_show, example_spaceport_env): # pylint example_spaceport_env.set_date(date_info) - example_spaceport_env.set_atmospheric_model( - type="Forecast", - file="HIRESW", - dictionary="HIRESW", - ) - - assert example_spaceport_env.all_info() is None + with pytest.raises( + ValueError, + match="HIRESW latest-model shortcut is currently unavailable", + ): + example_spaceport_env.set_atmospheric_model( + type="Forecast", + file="HIRESW", + dictionary="HIRESW", + ) @pytest.mark.skip(reason="CMC model is currently not working") diff --git a/tests/unit/environment/test_environment.py b/tests/unit/environment/test_environment.py index 6ad3e51db..6d04c089f 100644 --- a/tests/unit/environment/test_environment.py +++ b/tests/unit/environment/test_environment.py @@ -7,6 +7,7 @@ from rocketpy import Environment from rocketpy.environment.tools import geodesic_to_utm, utm_to_geodesic +from rocketpy.environment.weather_model_mapping import WeatherModelMapping @pytest.mark.parametrize( @@ -243,3 +244,70 @@ def test_environment_export_environment_exports_valid_environment_json( ) os.remove("environment.json") + + +class _DummyDataset: + """Small test double that mimics a netCDF dataset variables mapping.""" + + def __init__(self, variable_names): + self.variables = {name: object() for name in variable_names} + + +def test_resolve_dictionary_keeps_compatible_mapping(example_plain_env): + """Keep the user-selected mapping when it already matches dataset keys.""" + gfs_mapping = example_plain_env._Environment__weather_model_map.get("GFS") + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + 
"isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + gfs_mapping, dataset + ) + + assert resolved is gfs_mapping + + +def test_resolve_dictionary_falls_back_to_legacy_mapping(example_plain_env): + """Fallback to a compatible built-in mapping for legacy NOMADS-style files.""" + thredds_gfs_mapping = example_plain_env._Environment__weather_model_map.get("GFS") + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + "lev", + "tmpprs", + "hgtprs", + "ugrdprs", + "vgrdprs", + ] + ) + + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + thredds_gfs_mapping, dataset + ) + + # Explicit legacy mappings should be preferred over unrelated model mappings. + assert resolved == example_plain_env._Environment__weather_model_map.get( + "GFS_LEGACY" + ) + assert resolved["level"] == "lev" + assert resolved["temperature"] == "tmpprs" + assert resolved["geopotential_height"] == "hgtprs" + + +def test_weather_model_mapping_exposes_legacy_aliases(): + """Legacy mapping names should be available and case-insensitive.""" + mapping = WeatherModelMapping() + + assert mapping.get("GFS_LEGACY")["temperature"] == "tmpprs" + assert mapping.get("gfs_legacy")["temperature"] == "tmpprs" From 7d951f530e46e898691a927a0b86618589e84ee3 Mon Sep 17 00:00:00 2001 From: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Date: Thu, 19 Mar 2026 21:56:49 -0300 Subject: [PATCH 03/44] ENH: Add guidelines for simulation safety, Sphinx documentation, and pytest standards (GitHub Copilot) (#937) --- .github/agents/rocketpy-reviewer.agent.md | 62 ++++ .github/copilot-instructions.md | 301 +++++------------- .../simulation-safety.instructions.md | 41 +++ .../instructions/sphinx-docs.instructions.md | 32 ++ .../instructions/tests-python.instructions.md | 36 +++ 5 files changed, 251 
insertions(+), 221 deletions(-) create mode 100644 .github/agents/rocketpy-reviewer.agent.md create mode 100644 .github/instructions/simulation-safety.instructions.md create mode 100644 .github/instructions/sphinx-docs.instructions.md create mode 100644 .github/instructions/tests-python.instructions.md diff --git a/.github/agents/rocketpy-reviewer.agent.md b/.github/agents/rocketpy-reviewer.agent.md new file mode 100644 index 000000000..be1b64b13 --- /dev/null +++ b/.github/agents/rocketpy-reviewer.agent.md @@ -0,0 +1,62 @@ +--- +description: "Physics-safe RocketPy code review agent. Use for pull request review, unit consistency checks, coordinate-frame validation, cached-property risk detection, and regression-focused test-gap analysis." +name: "RocketPy Reviewer" +tools: [read, search, execute] +argument-hint: "Review these changes for physics correctness and regression risk: " +user-invocable: true +--- +You are a RocketPy-focused reviewer for physics safety and regression risk. + +## Goals + +- Detect behavioral regressions and numerical/physics risks before merge. +- Validate unit consistency and coordinate/reference-frame correctness. +- Identify stale-cache risks when `@cached_property` interacts with mutable state. +- Check test coverage quality for changed behavior. +- Verify alignment with RocketPy workflow and contributor conventions. + +## Review Priorities + +1. Correctness and safety issues (highest severity). +2. Behavioral regressions and API compatibility. +3. Numerical stability and tolerance correctness. +4. Missing tests or weak assertions. +5. Documentation mismatches affecting users. +6. Workflow violations (test placement, branch/PR conventions, or missing validation evidence). + +## RocketPy-Specific Checks + +- SI units are explicit and consistent. +- Orientation conventions are unambiguous (`tail_to_nose`, `nozzle_to_combustion_chamber`, etc.). +- New/changed simulation logic does not silently invalidate cached values. 
+- Floating-point assertions use `pytest.approx` where needed. +- New fixtures are wired through `tests/conftest.py` when applicable. +- Test type is appropriate for scope (`unit`, `integration`, `acceptance`) and `all_info()`-style tests + are not misclassified. +- New behavior includes at least one regression-oriented test and relevant edge-case checks. +- For docs-affecting changes, references and paths remain valid and build warnings are addressed. +- Tooling recommendations match current repository setup (prefer Makefile plus `pyproject.toml` + settings when docs are outdated). + +## Validation Expectations + +- Prefer focused test runs first, then broader relevant suites. +- Recommend `make format` and `make lint` when style/lint risks are present. +- Recommend `make build-docs` when `.rst` files or API docs are changed. + +## Output Format + +Provide findings first, ordered by severity. +For each finding include: +- Severity: Critical, High, Medium, or Low +- Location: file path and line +- Why it matters: behavioral or physics risk +- Suggested fix: concrete, minimal change + +After findings, include: +- Open questions or assumptions +- Residual risks or testing gaps +- Brief change summary +- Suggested validation commands (only when useful) + +If no findings are identified, state that explicitly and still report residual risks/testing gaps. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index f5366cb3b..382aa15e0 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,221 +1,80 @@ -# GitHub Copilot Instructions for RocketPy - -This file provides instructions for GitHub Copilot when working on the RocketPy codebase. -These guidelines help ensure consistency with the project's coding standards and development practices. - -## Project Overview - -RocketPy is a Python library for 6-DOF rocket trajectory simulation. 
-It's designed for high-power rocketry applications with focus on accuracy, performance, and ease of use. - -## Coding Standards - -### Naming Conventions -- **Use `snake_case` for all new code** - variables, functions, methods, and modules -- **Use descriptive names** - prefer `angle_of_attack` over `a` or `alpha` -- **Class names use PascalCase** - e.g., `SolidMotor`, `Environment`, `Flight` -- **Constants use UPPER_SNAKE_CASE** - e.g., `DEFAULT_GRAVITY`, `EARTH_RADIUS` - -### Code Style -- Follow **PEP 8** guidelines -- Line length: **88 characters** (Black's default) -- Organize imports with **isort** -- Our official formatter is the **ruff frmat** - -### Documentation -- **All public classes, methods, and functions must have docstrings** -- Use **NumPy style docstrings** -- Include **Parameters**, **Returns**, and **Examples** sections -- Document **units** for physical quantities (e.g., "in meters", "in radians") - -### Testing -- Write **unit tests** for all new features using pytest -- Follow **AAA pattern** (Arrange, Act, Assert) -- Use descriptive test names following: `test_methodname_expectedbehaviour` -- Include test docstrings explaining expected behavior -- Use **parameterization** for testing multiple scenarios -- Create pytest fixtures to avoid code repetition - -## Domain-Specific Guidelines - -### Physical Units and Conventions -- **SI units by default** - meters, kilograms, seconds, radians -- **Document coordinate systems** clearly (e.g., "tail_to_nose", "nozzle_to_combustion_chamber") -- **Position parameters** are critical - always document reference points -- Use **descriptive variable names** for physical quantities - -### Rocket Components -- **Motors**: SolidMotor, HybridMotor and LiquidMotor classes are children classes of the Motor class -- **Aerodynamic Surfaces**: They have Drag curves and lift coefficients -- **Parachutes**: Trigger functions, deployment conditions -- **Environment**: Atmospheric models, weather data, wind profiles - 
-### Mathematical Operations -- Use **numpy arrays** for vectorized operations (this improves performance) -- Prefer **scipy functions** for numerical integration and optimization -- **Handle edge cases** in calculations (division by zero, sqrt of negative numbers) -- **Validate input ranges** for physical parameters -- Monte Carlo simulations: sample from `numpy.random` for random number generation and creates several iterations to assess uncertainty in simulations. - -## File Structure and Organization - -### Source Code Organization - -Reminds that `rocketpy` is a Python package served as a library, and its source code is organized into several modules to facilitate maintainability and clarity. The following structure is recommended: - -``` -rocketpy/ -├── core/ # Core simulation classes -├── motors/ # Motor implementations -├── environment/ # Atmospheric and environmental models -├── plots/ # Plotting and visualization -├── tools/ # Utility functions -└── mathutils/ # Mathematical utilities -``` - -Please refer to popular Python packages like `scipy`, `numpy`, and `matplotlib` for inspiration on module organization. - -### Test Organization -``` -tests/ -├── unit/ # Unit tests -├── integration/ # Integration tests -├── acceptance/ # Acceptance tests -└── fixtures/ # Test fixtures organized by component -``` - -### Documentation Structure -``` -docs/ -├── user/ # User guides and tutorials -├── development/ # Development documentation -├── reference/ # API reference -├── examples/ # Flight examples and notebooks -└── technical/ # Technical documentation -``` - -## Common Patterns and Practices - -### Error Handling -- Use **descriptive error messages** with context -- **Validate inputs** at class initialization and method entry -- Raise **appropriate exception types** (ValueError, TypeError, etc.) 
-- Include **suggestions for fixes** in error messages - -### Performance Considerations -- Use **vectorized operations** where possible -- **Cache expensive computations** when appropriate (we frequently use `cached_property`) -- Keep in mind that RocketPy must be fast! - -### Backward Compatibility -- **Avoid breaking changes** in public APIs -- Use **deprecation warnings** before removing features -- **Document code changes** in docstrings and CHANGELOG - -## AI Assistant Guidelines - -### Code Generation -- **Always include docstrings** for new functions and classes -- **Follow existing patterns** in the codebase -- **Consider edge cases** and error conditions - -### Code Review and Suggestions -- **Check for consistency** with existing code style -- **Verify physical units** and coordinate systems -- **Ensure proper error handling** and input validation -- **Suggest performance improvements** when applicable -- **Recommend additional tests** for new functionality - -### Documentation Assistance -- **Use NumPy docstring format** consistently -- **Include practical examples** in docstrings -- **Document physical meanings** of parameters -- **Cross-reference related functions** and classes - -## Testing Guidelines - -### Unit Tests -- **Test individual methods** in isolation -- **Use fixtures** from the appropriate test fixture modules -- **Mock external dependencies** when necessary -- **Test both happy path and error conditions** - -### Integration Tests -- **Test interactions** between components -- **Verify end-to-end workflows** (Environment → Motor → Rocket → Flight) - -### Test Data -- **Use realistic parameters** for rocket simulations -- **Include edge cases** (very small/large rockets, extreme conditions) -- **Test with different coordinate systems** and orientations - -## Project-Specific Considerations - -### User Experience -- **Provide helpful error messages** with context and suggestions -- **Include examples** in docstrings and documentation -- 
**Support common use cases** with reasonable defaults - -## Examples of Good Practices - -### Function Definition -```python -def calculate_drag_force( - velocity, - air_density, - drag_coefficient, - reference_area -): - """Calculate drag force using the standard drag equation. - - Parameters - ---------- - velocity : float - Velocity magnitude in m/s. - air_density : float - Air density in kg/m³. - drag_coefficient : float - Dimensionless drag coefficient. - reference_area : float - Reference area in m². - - Returns - ------- - float - Drag force in N. - - Examples - -------- - >>> drag_force = calculate_drag_force(100, 1.225, 0.5, 0.01) - >>> print(f"Drag force: {drag_force:.2f} N") - """ - if velocity < 0: - raise ValueError("Velocity must be non-negative") - if air_density <= 0: - raise ValueError("Air density must be positive") - if reference_area <= 0: - raise ValueError("Reference area must be positive") - - return 0.5 * air_density * velocity**2 * drag_coefficient * reference_area -``` - -### Test Example -```python -def test_calculate_drag_force_returns_correct_value(): - """Test drag force calculation with known inputs.""" - # Arrange - velocity = 100.0 # m/s - air_density = 1.225 # kg/m³ - drag_coefficient = 0.5 - reference_area = 0.01 # m² - expected_force = 30.625 # N - - # Act - result = calculate_drag_force(velocity, air_density, drag_coefficient, reference_area) - - # Assert - assert abs(result - expected_force) < 1e-6 -``` - - -Remember: RocketPy prioritizes accuracy, performance, and usability. Always consider the physical meaning of calculations and provide clear, well-documented interfaces for users. +# RocketPy Workspace Instructions + +## Code Style +- Use snake_case for variables, functions, methods, and modules. Use descriptive names. +- Use PascalCase for classes and UPPER_SNAKE_CASE for constants. +- Keep lines at 88 characters and follow PEP 8 unless existing code in the target file differs. 
+- Run Ruff as the source of truth for formatting/import organization: + - `make format` + - `make lint` +- Use NumPy-style docstrings for public classes, methods, and functions, including units. +- In case of tooling drift between docs and config, prefer current repository tooling in `Makefile` + and `pyproject.toml`. + +## Architecture +- RocketPy is a modular Python library; keep feature logic in the correct package boundary: + - `rocketpy/simulation`: flight simulation and Monte Carlo orchestration. + - `rocketpy/rocket`, `rocketpy/motors`, `rocketpy/environment`: domain models. + - `rocketpy/mathutils`: numerical primitives and interpolation utilities. + - `rocketpy/plots`, `rocketpy/prints`: output and visualization layers. +- Prefer extending existing classes/patterns over introducing new top-level abstractions. +- Preserve public API stability in `rocketpy/__init__.py` exports. + +## Build and Test +- Use Makefile targets for OS-agnostic workflows: + - `make install` + - `make pytest` + - `make pytest-slow` + - `make coverage` + - `make coverage-report` + - `make build-docs` +- Before finishing code changes, run focused tests first, then broader relevant suites. +- When running Python directly in this workspace, prefer `.venv/Scripts/python.exe`. +- Slow tests are explicitly marked with `@pytest.mark.slow` and are run with `make pytest-slow`. +- For docs changes, check `make build-docs` output and resolve warnings/errors when practical. + +## Development Workflow +- Target pull requests to `develop` by default; `master` is the stable branch. +- Use branch names in `type/description` format, such as: + - `bug/` + - `doc/` + - `enh/` + - `mnt/` + - `tst/` +- Prefer rebasing feature branches on top of `develop` to keep history linear. +- Keep commit and PR titles explicit and prefixed with project acronyms when possible: + - `BUG`, `DOC`, `ENH`, `MNT`, `TST`, `BLD`, `REL`, `REV`, `STY`, `DEV`. + +## Conventions +- SI units are the default. 
Document units and coordinate-system references explicitly. +- Position/reference-frame arguments are critical in this codebase. Be explicit about orientation + (for example, `tail_to_nose`, `nozzle_to_combustion_chamber`). +- Include unit tests for new behavior. Follow AAA structure and clear test names. +- Use fixtures from `tests/fixtures`; if adding a new fixture module, update `tests/conftest.py`. +- Use `pytest.approx` for floating-point checks where appropriate. +- Use `@cached_property` for expensive computations when helpful, and be careful with stale-cache + behavior when underlying mutable state changes. +- Keep behavior backward compatible across the public API exported via `rocketpy/__init__.py`. +- Prefer extending existing module patterns over creating new top-level package structure. + +## Testing Taxonomy +- Unit tests are mandatory for new behavior. +- Unit tests in RocketPy can be sociable (real collaborators allowed) but should still be fast and + method-focused. +- Treat tests as integration tests when they are strongly I/O-oriented or broad across many methods, + including `all_info()` convention cases. +- Acceptance tests represent realistic user/flight scenarios and may compare simulation thresholds to + known flight data. 
+ +## Documentation Links +- Contributor workflow and setup: `docs/development/setting_up.rst` +- Style and naming details: `docs/development/style_guide.rst` +- Testing philosophy and structure: `docs/development/testing.rst` +- API reference conventions: `docs/reference/index.rst` +- Domain/physics background: `docs/technical/index.rst` + +## Scoped Customizations +- Simulation-specific rules: `.github/instructions/simulation-safety.instructions.md` +- Test-authoring rules: `.github/instructions/tests-python.instructions.md` +- RST/Sphinx documentation rules: `.github/instructions/sphinx-docs.instructions.md` +- Specialized review persona: `.github/agents/rocketpy-reviewer.agent.md` diff --git a/.github/instructions/simulation-safety.instructions.md b/.github/instructions/simulation-safety.instructions.md new file mode 100644 index 000000000..cc2af5d27 --- /dev/null +++ b/.github/instructions/simulation-safety.instructions.md @@ -0,0 +1,41 @@ +--- +description: "Use when editing rocketpy/simulation code, including Flight state updates, Monte Carlo orchestration, post-processing, or cached computations. Covers simulation state safety, unit/reference-frame clarity, and regression checks." +name: "Simulation Safety" +applyTo: "rocketpy/simulation/**/*.py" +--- +# Simulation Safety Guidelines + +- Keep simulation logic inside `rocketpy/simulation` and avoid leaking domain behavior that belongs in + `rocketpy/rocket`, `rocketpy/motors`, or `rocketpy/environment`. +- Preserve public API behavior and exported names used by `rocketpy/__init__.py`. +- Prefer extending existing simulation components before creating new abstractions: + - `flight.py`: simulation state, integration flow, and post-processing. + - `monte_carlo.py`: orchestration and statistical execution workflows. + - `flight_data_exporter.py` and `flight_data_importer.py`: persistence and interchange. + - `flight_comparator.py`: comparative analysis outputs. 
+- Be explicit with physical units and reference frames in new parameters, attributes, and docstrings. +- For position/orientation-sensitive behavior, use explicit conventions (for example + `tail_to_nose`, `nozzle_to_combustion_chamber`) and avoid implicit assumptions. +- Treat state mutation carefully when cached values exist. +- If changes can invalidate `@cached_property` values, either avoid post-computation mutation or + explicitly invalidate affected caches in a controlled, documented way. +- Keep numerical behavior deterministic unless stochastic behavior is intentional and documented. +- For Monte Carlo and stochastic code paths, make randomness controllable and reproducible when tests + rely on it. +- Prefer vectorized NumPy operations for hot paths and avoid introducing Python loops in + performance-critical sections without justification. +- Guard against numerical edge cases (zero/near-zero denominators, interpolation limits, and boundary + conditions). +- Do not change default numerical tolerances or integration behavior without documenting motivation and + validating regression impact. +- Add focused regression tests for changed behavior, including edge cases and orientation-dependent + behavior. +- For floating-point expectations, use `pytest.approx` with meaningful tolerances. +- Run focused tests first, then broader relevant tests (`make pytest` and `make pytest-slow` when + applicable). + +See: +- `docs/development/testing.rst` +- `docs/development/style_guide.rst` +- `docs/development/setting_up.rst` +- `docs/technical/index.rst` diff --git a/.github/instructions/sphinx-docs.instructions.md b/.github/instructions/sphinx-docs.instructions.md new file mode 100644 index 000000000..8c24cac53 --- /dev/null +++ b/.github/instructions/sphinx-docs.instructions.md @@ -0,0 +1,32 @@ +--- +description: "Use when writing or editing docs/**/*.rst. 
Covers Sphinx/reStructuredText conventions, cross-references, toctree hygiene, and RocketPy unit/reference-frame documentation requirements." +name: "Sphinx RST Conventions" +applyTo: "docs/**/*.rst" +--- +# Sphinx and RST Guidelines + +- Follow existing heading hierarchy and style in the target document. +- Prefer linking to existing documentation pages instead of duplicating content. +- Use Sphinx cross-references where appropriate (`:class:`, `:func:`, `:mod:`, `:doc:`, `:ref:`). +- Keep API names and module paths consistent with current code exports. +- Document physical units and coordinate/reference-frame conventions explicitly. +- Include concise, practical examples when introducing new user-facing behavior. +- Keep prose clear and technical; avoid marketing language in development/reference docs. +- When adding a new page, update the relevant `toctree` so it appears in navigation. +- Use RocketPy docs build workflow: + - `make build-docs` from repository root for normal validation. + - If stale artifacts appear, clean docs build outputs via `cd docs && make clean`, then rebuild. +- Treat new Sphinx warnings/errors as issues to fix or explicitly call out in review notes. +- Keep `docs/index.rst` section structure coherent with user, development, reference, technical, and + examples navigation. +- Do not edit Sphinx-generated scaffolding files unless explicitly requested: + - `docs/Makefile` + - `docs/make.bat` +- For API docs, ensure references remain aligned with exported/public objects and current module paths. 
+ +See: +- `docs/index.rst` +- `docs/development/build_docs.rst` +- `docs/development/style_guide.rst` +- `docs/reference/index.rst` +- `docs/technical/index.rst` diff --git a/.github/instructions/tests-python.instructions.md b/.github/instructions/tests-python.instructions.md new file mode 100644 index 000000000..1e9626142 --- /dev/null +++ b/.github/instructions/tests-python.instructions.md @@ -0,0 +1,36 @@ +--- +description: "Use when creating or editing pytest files in tests/. Enforces AAA structure, naming conventions, fixture usage, parameterization, slow-test marking, and numerical assertion practices for RocketPy." +name: "RocketPy Pytest Standards" +applyTo: "tests/**/*.py" +--- +# RocketPy Test Authoring Guidelines + +- Unit tests are mandatory for new behavior. +- Follow AAA structure in each test: Arrange, Act, Assert. +- Use descriptive test names matching project convention: + - `test_methodname` + - `test_methodname_stateundertest` + - `test_methodname_expectedbehaviour` +- Include docstrings that clearly state expected behavior and context. +- Prefer parameterization for scenario matrices instead of duplicated tests. +- Classify tests correctly: + - `tests/unit`: fast, method-focused tests (sociable unit tests are acceptable in RocketPy). + - `tests/integration`: broad multi-method/component interactions and strongly I/O-oriented cases. + - `tests/acceptance`: realistic end-user/flight scenarios with threshold-based expectations. +- By RocketPy convention, tests centered on `all_info()` behavior are integration tests. +- Reuse fixtures from `tests/fixtures` whenever possible. +- Keep fixture organization aligned with existing categories under `tests/fixtures` + (environment, flight, motor, rockets, surfaces, units, etc.). +- If you add a new fixture module, update `tests/conftest.py` so fixtures are discoverable. 
+- Keep tests deterministic: set seeds when randomness is involved and avoid unstable external + dependencies unless integration behavior explicitly requires them. +- Use `pytest.approx` for floating-point comparisons with realistic tolerances. +- Mark expensive tests with `@pytest.mark.slow` and ensure they can run under the project slow-test + workflow. +- Include at least one negative or edge-case assertion for new behaviors. +- When adding a bug fix, include a regression test that fails before the fix and passes after it. + +See: +- `docs/development/testing.rst` +- `docs/development/style_guide.rst` +- `docs/development/setting_up.rst` From 22489fa11a543a1ebc030374f08340445781832c Mon Sep 17 00:00:00 2001 From: MateusStano Date: Sun, 8 Mar 2026 12:35:38 -0300 Subject: [PATCH 04/44] REL: bump version to 1.12 --- CHANGELOG.md | 15 +++++++++++++++ docs/conf.py | 4 ++-- pyproject.toml | 2 +- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e46ee3faa..2658868a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,6 +32,21 @@ Attention: The newest changes should be on top --> ### Added +- + +### Changed + +- + +### Fixed + +- + +## [v1.12.0] - 2026-03-08 + +### Added + + - ENH: Air brakes controller functions now support 8-parameter signature [#854](https://github.com/RocketPy-Team/RocketPy/pull/854) - TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914] (https://github.com/RocketPy-Team/RocketPy/pull/914_ - ENH: Add background map auto download functionality to Monte Carlo plots [#896](https://github.com/RocketPy-Team/RocketPy/pull/896) diff --git a/docs/conf.py b/docs/conf.py index ae8a4b17d..e535082e7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,12 +22,12 @@ # -- Project information ----------------------------------------------------- project = "RocketPy" -copyright = "2025, RocketPy Team" +copyright = "2026, RocketPy Team" author = "RocketPy Team" # The full version, 
including alpha/beta/rc tags -release = "1.11.0" +release = "1.12.0" # -- General configuration --------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index 35ea34382..b9433c6d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "rocketpy" -version = "1.11.0" +version = "1.12.0" description="Advanced 6-DOF trajectory simulation for High-Power Rocketry." dynamic = ["dependencies"] readme = "README.md" From 8d49e5f97831a4e905340064d3f132594b73faaf Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Wed, 18 Mar 2026 22:34:55 -0300 Subject: [PATCH 05/44] ENH: Add explicit timeouts to ThrustCurve API requests and update changelog (#940) * Initial plan * ENH: Add explicit timeouts to ThrustCurve API requests Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> * DOC: Add timeout fix PR to changelog Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> --- CHANGELOG.md | 2 +- rocketpy/motors/motor.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2658868a7..aae77b29f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,7 +40,7 @@ Attention: The newest changes should be on top --> ### Fixed -- +- BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) ## [v1.12.0] - 2026-03-08 diff --git a/rocketpy/motors/motor.py b/rocketpy/motors/motor.py index 373154512..ea42dd71c 100644 --- a/rocketpy/motors/motor.py +++ b/rocketpy/motors/motor.py @@ -1946,8 +1946,11 @@ def _call_thrustcurve_api(name: str, no_cache: bool = False): # pylint: disable ------ ValueError If no motor is found or if the downloaded .eng data is missing. 
+ requests.exceptions.Timeout + If a search or download request to the ThrustCurve API exceeds the + timeout limit (5 s connect / 30 s read). requests.exceptions.RequestException - If a network or HTTP error occurs during the API call. + If any other network or HTTP error occurs during the API call. Notes ----- @@ -1973,8 +1976,13 @@ def _call_thrustcurve_api(name: str, no_cache: bool = False): # pylint: disable ) base_url = "https://www.thrustcurve.org/api/v1" + _timeout = (5, 30) # (connect timeout, read timeout) in seconds # Step 1. Search motor - response = requests.get(f"{base_url}/search.json", params={"commonName": name}) + response = requests.get( + f"{base_url}/search.json", + params={"commonName": name}, + timeout=_timeout, + ) response.raise_for_status() data = response.json() @@ -1994,6 +2002,7 @@ def _call_thrustcurve_api(name: str, no_cache: bool = False): # pylint: disable dl_response = requests.get( f"{base_url}/download.json", params={"motorIds": motor_id, "format": "RASP", "data": "file"}, + timeout=_timeout, ) dl_response.raise_for_status() dl_data = dl_response.json() From 116f327f9f04a375eb528c3575b41836fed2a6a0 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Mar 2026 22:06:15 -0300 Subject: [PATCH 06/44] ENH: Restore power_off/on_drag as Function objects; add _input attributes for raw user input and update changelog (#941) * Initial plan * ENH: Restore power_off/on_drag as Function, add _input attributes for raw user input Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> * DOC: Add PR #941 compatibility fix to changelog Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> * Update rocketpy/rocket/rocket.py Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> * MNT: ruff pylint --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: MateusStano 
<69485049+MateusStano@users.noreply.github.com> Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Co-authored-by: MateusStano --- CHANGELOG.md | 4 +++- rocketpy/rocket/point_mass_rocket.py | 12 ++++++++++++ rocketpy/rocket/rocket.py | 23 ++++++++++++++++------- 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aae77b29f..fe77d0dda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -40,7 +40,7 @@ Attention: The newest changes should be on top --> ### Fixed -- BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) +- ## [v1.12.0] - 2026-03-08 @@ -73,6 +73,8 @@ Attention: The newest changes should be on top --> ### Fixed +- BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) +- BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] diff --git a/rocketpy/rocket/point_mass_rocket.py b/rocketpy/rocket/point_mass_rocket.py index eaddaadec..d94363d2b 100644 --- a/rocketpy/rocket/point_mass_rocket.py +++ b/rocketpy/rocket/point_mass_rocket.py @@ -41,6 +41,18 @@ class PointMassRocket(Rocket): center_of_mass_without_motor : float Position, in meters, of the rocket's center of mass without motor relative to the rocket's coordinate system. + power_off_drag : Function + Rocket's drag coefficient as a function of Mach number when the + motor is off. Alias for ``power_off_drag_by_mach``. 
+ power_on_drag : Function + Rocket's drag coefficient as a function of Mach number when the + motor is on. Alias for ``power_on_drag_by_mach``. + power_off_drag_input : int, float, callable, array, string, Function + Original user input for the drag coefficient with motor off. + Preserved for reconstruction and Monte Carlo workflows. + power_on_drag_input : int, float, callable, array, string, Function + Original user input for the drag coefficient with motor on. + Preserved for reconstruction and Monte Carlo workflows. power_off_drag_7d : Function Drag coefficient function with seven inputs in the order: alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate. diff --git a/rocketpy/rocket/rocket.py b/rocketpy/rocket/rocket.py index 51719753d..0e44365d6 100644 --- a/rocketpy/rocket/rocket.py +++ b/rocketpy/rocket/rocket.py @@ -147,12 +147,18 @@ class Rocket: Rocket.static_margin : float Float value corresponding to rocket static margin when loaded with propellant in units of rocket diameter or calibers. - Rocket.power_off_drag : int, float, callable, string, array, Function + Rocket.power_off_drag : Function + Rocket's drag coefficient as a function of Mach number when the + motor is off. Alias for ``power_off_drag_by_mach``. + Rocket.power_on_drag : Function + Rocket's drag coefficient as a function of Mach number when the + motor is on. Alias for ``power_on_drag_by_mach``. + Rocket.power_off_drag_input : int, float, callable, string, array, Function Original user input for rocket's drag coefficient when the motor is - off. This is preserved for reconstruction and Monte Carlo workflows. - Rocket.power_on_drag : int, float, callable, string, array, Function + off. Preserved for reconstruction and Monte Carlo workflows. + Rocket.power_on_drag_input : int, float, callable, string, array, Function Original user input for rocket's drag coefficient when the motor is - on. This is preserved for reconstruction and Monte Carlo workflows. + on. 
Preserved for reconstruction and Monte Carlo workflows. Rocket.power_off_drag_7d : Function Rocket's drag coefficient with motor off as a 7D function of (alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate). @@ -375,9 +381,12 @@ def __init__( # pylint: disable=too-many-statements interpolation="linear", extrapolation="constant", ) - # Saving user input for monte carlo - self.power_off_drag = power_off_drag - self.power_on_drag = power_on_drag + # Saving raw user input for reconstruction and Monte Carlo + self._power_off_drag_input = power_off_drag + self._power_on_drag_input = power_on_drag + # Public API attributes: keep as Function (Mach-only) for backward compatibility + self.power_off_drag = self.power_off_drag_by_mach + self.power_on_drag = self.power_on_drag_by_mach # Create a, possibly, temporary empty motor # self.motors = Components() # currently unused, only 1 motor is supported From 5f1a3ebe3a78b63945b386710457757e0fb94d5e Mon Sep 17 00:00:00 2001 From: MateusStano Date: Thu, 19 Mar 2026 22:56:20 -0300 Subject: [PATCH 07/44] MNT: Remove unused imports and deprecated functions from mathutils/function.py --- rocketpy/mathutils/function.py | 54 +--------------------------------- 1 file changed, 1 insertion(+), 53 deletions(-) diff --git a/rocketpy/mathutils/function.py b/rocketpy/mathutils/function.py index 622d7a676..3f73dd840 100644 --- a/rocketpy/mathutils/function.py +++ b/rocketpy/mathutils/function.py @@ -5,19 +5,15 @@ carefully as it may impact all the rest of the project. 
""" -import base64 -import functools import operator import warnings from bisect import bisect_left from collections.abc import Iterable from copy import deepcopy -from enum import Enum from functools import cached_property from inspect import signature from pathlib import Path -import dill import matplotlib.pyplot as plt import numpy as np from scipy import integrate, linalg, optimize @@ -29,6 +25,7 @@ ) from rocketpy.plots.plot_helpers import show_or_save_plot +from rocketpy.tools import deprecated # Numpy 1.x compatibility, # TODO: remove these lines when all dependencies support numpy>=2.0.0 @@ -51,55 +48,6 @@ EXTRAPOLATION_TYPES = {"zero": 0, "natural": 1, "constant": 2} -def deprecated(reason=None, version=None, alternative=None): - """Decorator to mark functions or methods as deprecated. - - This decorator issues a DeprecationWarning when the decorated function - is called, indicating that it will be removed in future versions. - """ - - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - if reason: - message = reason - else: - message = f"The function `{func.__name__}` is deprecated" - - if version: - message += f" and will be removed in {version}" - - if alternative: - message += f". Use `{alternative}` instead" - - message += "." - warnings.warn(message, DeprecationWarning, stacklevel=2) - return func(*args, **kwargs) - - return wrapper - - return decorator - - -def to_hex_encode(obj, encoder=base64.b85encode): - """Converts an object to hex representation using dill.""" - return encoder(dill.dumps(obj)).hex() - - -def from_hex_decode(obj_bytes, decoder=base64.b85decode): - """Converts an object from hex representation using dill.""" - return dill.loads(decoder(bytes.fromhex(obj_bytes))) - - -class SourceType(Enum): - """Enumeration of the source types for the Function class. - The source can be either a callable or an array. 
- """ - - CALLABLE = 0 - ARRAY = 1 - - class Function: # pylint: disable=too-many-public-methods """Class converts a python function or a data sequence into an object which can be handled more naturally, enabling easy interpolation, From 877e1474e0f82e0a63d48b41e02f3140b10716d4 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Thu, 19 Mar 2026 23:03:00 -0300 Subject: [PATCH 08/44] BUG: Readd SourceType enumeration for function source types and clean up imports --- rocketpy/mathutils/function.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/rocketpy/mathutils/function.py b/rocketpy/mathutils/function.py index 3f73dd840..f11e4879e 100644 --- a/rocketpy/mathutils/function.py +++ b/rocketpy/mathutils/function.py @@ -10,6 +10,7 @@ from bisect import bisect_left from collections.abc import Iterable from copy import deepcopy +from enum import Enum from functools import cached_property from inspect import signature from pathlib import Path @@ -25,7 +26,7 @@ ) from rocketpy.plots.plot_helpers import show_or_save_plot -from rocketpy.tools import deprecated +from rocketpy.tools import deprecated, from_hex_decode, to_hex_encode # Numpy 1.x compatibility, # TODO: remove these lines when all dependencies support numpy>=2.0.0 @@ -47,6 +48,13 @@ } EXTRAPOLATION_TYPES = {"zero": 0, "natural": 1, "constant": 2} +class SourceType(Enum): + """Enumeration of the source types for the Function class. + The source can be either a callable or an array. 
+ """ + + CALLABLE = 0 + ARRAY = 1 class Function: # pylint: disable=too-many-public-methods """Class converts a python function or a data sequence into an object From 9d0ec386c02ff7242af3c345955003c03e2caf5a Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Thu, 19 Mar 2026 23:09:22 -0300 Subject: [PATCH 09/44] BUG: Fix incorrect Jacobian in `only_radial_burn` branch of `SolidMotor.evaluate_geometry` (#944) * Initial plan * BUG: Fix incorrect Jacobian in only_radial_burn branch of evaluate_geometry Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: MateusStano <69485049+MateusStano@users.noreply.github.com> --- CHANGELOG.md | 2 ++ rocketpy/motors/solid_motor.py | 11 ++++++----- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fe77d0dda..6b8d0aee4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,8 @@ Attention: The newest changes should be on top --> - BUG: Fix parallel Monte Carlo simulation showing incorrect iteration count [#806](https://github.com/RocketPy-Team/RocketPy/pull/806) - BUG: Fix missing titles in roll parameter plots for fin sets [#934](https://github.com/RocketPy-Team/RocketPy/pull/934) - BUG: Duplicate _controllers in Flight.TimeNodes.merge() [#931](https://github.com/RocketPy-Team/RocketPy/pull/931) +- BUG: Fix incorrect Jacobian in `only_radial_burn` branch of `SolidMotor.evaluate_geometry` [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) +- BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) ## [v1.11.0] - 2025-11-01 diff --git a/rocketpy/motors/solid_motor.py b/rocketpy/motors/solid_motor.py index f5e89c2f8..590a02511 100644 --- a/rocketpy/motors/solid_motor.py +++ b/rocketpy/motors/solid_motor.py @@ -546,13 +546,14 @@ def geometry_jacobian(t, 
y): 2 * np.pi * (grain_inner_radius * grain_height) ** 2 ) - inner_radius_derivative_wrt_inner_radius = factor * ( - grain_height - 2 * grain_inner_radius - ) - inner_radius_derivative_wrt_height = 0 + # burn_area = 2π*r*h, so ṙ = -vdiff/(2π*r*h): + # ∂ṙ/∂r = vdiff/(2π*r²*h) = factor * h + # ∂ṙ/∂h = vdiff/(2π*r*h²) = factor * r + inner_radius_derivative_wrt_inner_radius = factor * grain_height + inner_radius_derivative_wrt_height = factor * grain_inner_radius + # dh/dt = 0, so all partial derivatives of height are zero height_derivative_wrt_inner_radius = 0 height_derivative_wrt_height = 0 - # Height is a constant, so all the derivatives with respect to it are set to zero return [ [ From b205b75df9d7fa113fcf41cce981752c51986906 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Thu, 19 Mar 2026 23:35:22 -0300 Subject: [PATCH 10/44] ENH: move weathercock_coeff to PointMassRocket --- docs/user/three_dof_simulation.rst | 13 ++++++----- rocketpy/rocket/point_mass_rocket.py | 10 +++++++++ rocketpy/simulation/flight.py | 19 ++++------------ tests/acceptance/test_3dof_flight.py | 8 +++---- tests/fixtures/flight/flight_fixtures.py | 5 +++-- .../simulation/test_flight_3dof.py | 22 ++++++------------- 6 files changed, 35 insertions(+), 42 deletions(-) diff --git a/docs/user/three_dof_simulation.rst b/docs/user/three_dof_simulation.rst index 3ac88dca0..70cec6d98 100644 --- a/docs/user/three_dof_simulation.rst +++ b/docs/user/three_dof_simulation.rst @@ -381,7 +381,7 @@ The ``weathercock_coeff`` Parameter ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The weathercocking behavior is controlled by the ``weathercock_coeff`` parameter -in the :class:`rocketpy.Flight` class: +in the :class:`rocketpy.PointMassRocket` class: ..
jupyter-execute:: @@ -407,10 +407,11 @@ in the :class:`rocketpy.Flight` class: center_of_mass_without_motor=0.0, power_off_drag=0.43, power_on_drag=0.43, + weathercock_coeff=1.0, # Example with weathercocking enabled ) rocket.add_motor(motor, position=0) - # Flight with weathercocking enabled + # Flight uses the weathercocking configured on the point-mass rocket flight = Flight( rocket=rocket, environment=env, @@ -418,7 +419,6 @@ in the :class:`rocketpy.Flight` class: inclination=85, heading=45, simulation_mode="3 DOF", - weathercock_coeff=1.0, # Example with weathercocking enabled ) print(f"Apogee: {flight.apogee - env.elevation:.2f} m") @@ -540,6 +540,7 @@ accuracy. center_of_mass_without_motor=0, power_off_drag=0.43, power_on_drag=0.43, + weathercock_coeff=0.0, ) rocket_3dof.add_motor(motor_3dof, -1.1356) @@ -561,6 +562,7 @@ accuracy. # 3-DOF with no weathercocking start = time.time() + rocket_3dof.weathercock_coeff = 0.0 flight_3dof_0 = Flight( rocket=rocket_3dof, environment=env, @@ -569,12 +571,12 @@ accuracy. heading=45, terminate_on_apogee=True, simulation_mode="3 DOF", - weathercock_coeff=0.0, ) time_3dof_0 = time.time() - start # 3-DOF with default weathercocking start = time.time() + rocket_3dof.weathercock_coeff = 1.0 flight_3dof_1 = Flight( rocket=rocket_3dof, environment=env, @@ -583,12 +585,12 @@ accuracy. heading=45, terminate_on_apogee=True, simulation_mode="3 DOF", - weathercock_coeff=1.0, ) time_3dof_1 = time.time() - start # 3-DOF with high weathercocking start = time.time() + rocket_3dof.weathercock_coeff = 5.0 flight_3dof_5 = Flight( rocket=rocket_3dof, environment=env, @@ -597,7 +599,6 @@ accuracy. 
heading=45, terminate_on_apogee=True, simulation_mode="3 DOF", - weathercock_coeff=5.0, ) time_3dof_5 = time.time() - start diff --git a/rocketpy/rocket/point_mass_rocket.py b/rocketpy/rocket/point_mass_rocket.py index d94363d2b..32681ee0d 100644 --- a/rocketpy/rocket/point_mass_rocket.py +++ b/rocketpy/rocket/point_mass_rocket.py @@ -31,6 +31,10 @@ class PointMassRocket(Rocket): as :class:`rocketpy.Rocket`, including 1D (Mach-only) and 7D (alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate) definitions. + weathercock_coeff : float, optional + Proportionality coefficient for the alignment rate of the point-mass + rocket body axis with the relative wind direction in 3-DOF + simulations. Must be non-negative. Default is 0.0. Attributes ---------- @@ -63,6 +67,9 @@ class PointMassRocket(Rocket): Convenience wrapper for power-off drag as a Mach-only function. power_on_drag_by_mach : Function Convenience wrapper for power-on drag as a Mach-only function. + weathercock_coeff : float + Proportionality coefficient for weathercocking alignment in 3-DOF + simulations. """ def __init__( @@ -72,6 +79,7 @@ def __init__( center_of_mass_without_motor: float, power_off_drag, power_on_drag, + weathercock_coeff: float = 0.0, ): self._center_of_mass_without_motor_pointmass = center_of_mass_without_motor self._center_of_dry_mass_position = center_of_mass_without_motor @@ -84,6 +92,8 @@ def __init__( self.dry_I_13 = 0.0 self.dry_I_23 = 0.0 + self.weathercock_coeff = float(weathercock_coeff) + # Call base init with safe defaults super().__init__( radius=radius, diff --git a/rocketpy/simulation/flight.py b/rocketpy/simulation/flight.py index eb82b1998..1443d1d80 100644 --- a/rocketpy/simulation/flight.py +++ b/rocketpy/simulation/flight.py @@ -504,7 +504,6 @@ def __init__( # pylint: disable=too-many-arguments,too-many-statements equations_of_motion="standard", ode_solver="LSODA", simulation_mode="6 DOF", - weathercock_coeff=0.0, ): """Run a trajectory simulation. 
@@ -588,16 +587,6 @@ def __init__( # pylint: disable=too-many-arguments,too-many-statements A custom ``scipy.integrate.OdeSolver`` can be passed as well. For more information on the integration methods, see the scipy documentation [1]_. - weathercock_coeff : float, optional - Proportionality coefficient (rate coefficient) for the alignment rate of the rocket's body axis - with the relative wind direction in 3-DOF simulations, in rad/s. The actual angular velocity - applied to align the rocket is calculated as ``weathercock_coeff * sin(angle)``, where ``angle`` - is the angle between the rocket's axis and the wind direction. A higher value means faster alignment - (quasi-static weathercocking). This parameter is only used when simulation_mode is '3 DOF'. - Default is 0.0 to mimic a pure 3-DOF simulation without any weathercocking (fixed attitude). - Set to a positive value to enable quasi-static weathercocking behaviour. - - Returns ------- None @@ -627,7 +616,6 @@ def __init__( # pylint: disable=too-many-arguments,too-many-statements self.equations_of_motion = equations_of_motion self.simulation_mode = simulation_mode self.ode_solver = ode_solver - self.weathercock_coeff = weathercock_coeff # Controller initialization self.__init_controllers() @@ -2310,7 +2298,8 @@ def u_dot_generalized_3dof(self, t, u, post_processing=False): r_dot = [vx, vy, vz] # Weathercocking: evolve body axis direction toward relative wind # The body z-axis (attitude vector) should align with -freestream_velocity - if self.weathercock_coeff > 0 and free_stream_speed > 1e-6: + weathercock_coeff = getattr(self.rocket, "weathercock_coeff", 0.0) + if weathercock_coeff > 0 and free_stream_speed > 1e-6: # Current body z-axis in inertial frame (attitude vector) # From rotation matrix: column 3 gives the body z-axis in inertial frame body_z_inertial = Vector( @@ -2342,7 +2331,7 @@ def u_dot_generalized_3dof(self, t, u, post_processing=False): sin_angle = min(1.0, max(-1.0, rotation_axis_mag)) # 
Angular velocity magnitude proportional to misalignment angle - omega_mag = self.weathercock_coeff * sin_angle + omega_mag = weathercock_coeff * sin_angle # Angular velocity in inertial frame, then transform to body frame omega_body = Kt @ (rotation_axis * omega_mag) @@ -2363,7 +2352,7 @@ def u_dot_generalized_3dof(self, t, u, post_processing=False): ) rotation_axis = perp_axis.unit_vector # 180 degree rotation: sin(angle) = 1 - omega_mag = self.weathercock_coeff * 1.0 + omega_mag = weathercock_coeff * 1.0 omega_body = Kt @ (rotation_axis * omega_mag) # else: aligned (dot > 0.999) - no rotation needed, omega_body stays None diff --git a/tests/acceptance/test_3dof_flight.py b/tests/acceptance/test_3dof_flight.py index 08c3cb2f4..cdd44f9b7 100644 --- a/tests/acceptance/test_3dof_flight.py +++ b/tests/acceptance/test_3dof_flight.py @@ -202,7 +202,7 @@ def test_3dof_weathercocking_coefficient_stored(flight_3dof_with_weathercock): flight_3dof_with_weathercock : rocketpy.Flight A 3 DOF flight simulation with weathercocking enabled. """ - assert flight_3dof_with_weathercock.weathercock_coeff == 1.0 + assert flight_3dof_with_weathercock.rocket.weathercock_coeff == 1.0 def test_3dof_flight_post_processing_attributes(flight_3dof_no_weathercock): @@ -399,6 +399,8 @@ def test_3dof_flight_reproducibility( acceptance_point_mass_rocket : rocketpy.PointMassRocket Rocket fixture for testing. 
""" + acceptance_point_mass_rocket.weathercock_coeff = 0.5 + # Run simulation twice with same parameters flight1 = Flight( rocket=acceptance_point_mass_rocket, @@ -407,7 +409,6 @@ def test_3dof_flight_reproducibility( inclination=LAUNCH_INCLINATION, heading=LAUNCH_HEADING, simulation_mode="3 DOF", - weathercock_coeff=0.5, ) flight2 = Flight( @@ -417,7 +418,6 @@ def test_3dof_flight_reproducibility( inclination=LAUNCH_INCLINATION, heading=LAUNCH_HEADING, simulation_mode="3 DOF", - weathercock_coeff=0.5, ) # Results should be identical @@ -452,6 +452,7 @@ def test_3dof_flight_different_weathercock_coefficients( flights = [] for coeff in coefficients: + acceptance_point_mass_rocket.weathercock_coeff = coeff flight = Flight( rocket=acceptance_point_mass_rocket, environment=example_spaceport_env, @@ -459,7 +460,6 @@ def test_3dof_flight_different_weathercock_coefficients( inclination=LAUNCH_INCLINATION, heading=LAUNCH_HEADING, simulation_mode="3 DOF", - weathercock_coeff=coeff, ) flights.append(flight) diff --git a/tests/fixtures/flight/flight_fixtures.py b/tests/fixtures/flight/flight_fixtures.py index b13b52b6b..f18b45f9e 100644 --- a/tests/fixtures/flight/flight_fixtures.py +++ b/tests/fixtures/flight/flight_fixtures.py @@ -352,6 +352,7 @@ def acceptance_point_mass_rocket(acceptance_point_mass_motor): center_of_mass_without_motor=0, power_off_drag=0.43, power_on_drag=0.43, + weathercock_coeff=0.0, ) rocket.add_motor(acceptance_point_mass_motor, position=0) return rocket @@ -376,6 +377,7 @@ def flight_3dof_no_weathercock(example_spaceport_env, acceptance_point_mass_rock rocketpy.Flight A 3 DOF flight simulation with weathercock_coeff=0.0. 
""" + acceptance_point_mass_rocket.weathercock_coeff = 0.0 return Flight( rocket=acceptance_point_mass_rocket, environment=example_spaceport_env, @@ -383,7 +385,6 @@ def flight_3dof_no_weathercock(example_spaceport_env, acceptance_point_mass_rock inclination=LAUNCH_INCLINATION, heading=LAUNCH_HEADING, simulation_mode="3 DOF", - weathercock_coeff=0.0, ) @@ -406,6 +407,7 @@ def flight_3dof_with_weathercock(example_spaceport_env, acceptance_point_mass_ro rocketpy.Flight A 3 DOF flight simulation with weathercock_coeff=1.0. """ + acceptance_point_mass_rocket.weathercock_coeff = 1.0 return Flight( rocket=acceptance_point_mass_rocket, environment=example_spaceport_env, @@ -413,5 +415,4 @@ def flight_3dof_with_weathercock(example_spaceport_env, acceptance_point_mass_ro inclination=LAUNCH_INCLINATION, heading=LAUNCH_HEADING, simulation_mode="3 DOF", - weathercock_coeff=1.0, ) diff --git a/tests/integration/simulation/test_flight_3dof.py b/tests/integration/simulation/test_flight_3dof.py index ff504a7c6..5c8929ddf 100644 --- a/tests/integration/simulation/test_flight_3dof.py +++ b/tests/integration/simulation/test_flight_3dof.py @@ -59,12 +59,12 @@ def flight_weathercock_zero(example_plain_env, point_mass_rocket): rocketpy.simulation.flight.Flight A Flight object configured for 3-DOF with zero weathercock coefficient. """ + point_mass_rocket.weathercock_coeff = 0.0 return Flight( rocket=point_mass_rocket, environment=example_plain_env, rail_length=1, simulation_mode="3 DOF", - weathercock_coeff=0.0, ) @@ -94,12 +94,12 @@ def flight_weathercock_pos(example_plain_env, point_mass_rocket): rocketpy.simulation.flight.Flight A Flight object configured for 3-DOF with weathercocking enabled. 
""" + point_mass_rocket.weathercock_coeff = 1.0 return Flight( rocket=point_mass_rocket, environment=example_plain_env, rail_length=1, simulation_mode="3 DOF", - weathercock_coeff=1.0, ) @@ -169,24 +169,16 @@ def test_invalid_simulation_mode(example_plain_env, calisto): ) -def test_weathercock_coeff_stored(example_plain_env, point_mass_rocket): - """Tests that the weathercock_coeff parameter is correctly stored. +def test_weathercock_coeff_stored(point_mass_rocket): + """Tests that weathercock coefficient is stored in PointMassRocket. Parameters ---------- - example_plain_env : rocketpy.Environment - A basic environment fixture for flight simulation. point_mass_rocket : rocketpy.PointMassRocket A point mass rocket fixture for 3-DOF simulation. """ - flight = Flight( - rocket=point_mass_rocket, - environment=example_plain_env, - rail_length=1, - simulation_mode="3 DOF", - weathercock_coeff=2.5, - ) - assert flight.weathercock_coeff == 2.5 + point_mass_rocket.weathercock_coeff = 2.5 + assert point_mass_rocket.weathercock_coeff == 2.5 def test_weathercock_coeff_default(flight_3dof): @@ -197,7 +189,7 @@ def test_weathercock_coeff_default(flight_3dof): flight_3dof : rocketpy.Flight A Flight object for a 3-DOF simulation, provided by the flight_3dof fixture. 
""" - assert flight_3dof.weathercock_coeff == 0.0 + assert flight_3dof.rocket.weathercock_coeff == 0.0 def test_point_mass_rocket_3dof_uses_7d_drag_inputs( From dc6e87817847bfa7f28589869febdbf63db1c247 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Thu, 19 Mar 2026 23:36:30 -0300 Subject: [PATCH 11/44] MNT: ruff --- rocketpy/mathutils/function.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rocketpy/mathutils/function.py b/rocketpy/mathutils/function.py index f11e4879e..e7ef294ad 100644 --- a/rocketpy/mathutils/function.py +++ b/rocketpy/mathutils/function.py @@ -48,6 +48,7 @@ } EXTRAPOLATION_TYPES = {"zero": 0, "natural": 1, "constant": 2} + class SourceType(Enum): """Enumeration of the source types for the Function class. The source can be either a callable or an array. @@ -56,6 +57,7 @@ class SourceType(Enum): CALLABLE = 0 ARRAY = 1 + class Function: # pylint: disable=too-many-public-methods """Class converts a python function or a data sequence into an object which can be handled more naturally, enabling easy interpolation, From df52c158226334ced85979d23cde5630e85bb4a5 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 20 Mar 2026 11:10:38 -0300 Subject: [PATCH 12/44] MNT: fix cyclic import --- rocketpy/mathutils/function.py | 9 +- .../rocket/aero_surface/generic_surface.py | 90 +++++- rocketpy/rocket/rocket.py | 130 ++++++++- rocketpy/tools.py | 256 ------------------ 4 files changed, 217 insertions(+), 268 deletions(-) diff --git a/rocketpy/mathutils/function.py b/rocketpy/mathutils/function.py index e7ef294ad..33a82ec01 100644 --- a/rocketpy/mathutils/function.py +++ b/rocketpy/mathutils/function.py @@ -17,6 +17,7 @@ import matplotlib.pyplot as plt import numpy as np +from numpy import trapezoid from scipy import integrate, linalg, optimize from scipy.interpolate import ( LinearNDInterpolator, @@ -28,14 +29,6 @@ from rocketpy.plots.plot_helpers import show_or_save_plot from rocketpy.tools import deprecated, from_hex_decode, to_hex_encode -# Numpy 
1.x compatibility, -# TODO: remove these lines when all dependencies support numpy>=2.0.0 -if np.lib.NumpyVersion(np.__version__) >= "2.0.0b1": - # pylint: disable=no-name-in-module - from numpy import trapezoid # pragma: no cover -else: - from numpy import trapz as trapezoid # pragma: no cover - NUMERICAL_TYPES = (float, int, complex, np.integer, np.floating) INTERPOLATION_TYPES = { "linear": 0, diff --git a/rocketpy/rocket/aero_surface/generic_surface.py b/rocketpy/rocket/aero_surface/generic_surface.py index 23ccb0d77..8ab438620 100644 --- a/rocketpy/rocket/aero_surface/generic_surface.py +++ b/rocketpy/rocket/aero_surface/generic_surface.py @@ -1,11 +1,11 @@ import copy +import csv import math import numpy as np from rocketpy.mathutils import Function from rocketpy.mathutils.vector_matrix import Matrix, Vector -from rocketpy.tools import load_generic_surface_csv class GenericSurface: @@ -328,7 +328,7 @@ def _process_input(self, input_data, coeff_name): """ if isinstance(input_data, str): # Input is assumed to be a file path to a CSV - return load_generic_surface_csv(input_data, coeff_name) + return self.__load_generic_surface_csv(input_data, coeff_name) elif isinstance(input_data, Function): if input_data.__dom_dim__ != 7: raise ValueError( @@ -379,3 +379,89 @@ def _process_input(self, input_data, coeff_name): f"Invalid input for {coeff_name}: must be a CSV file path" " or a callable." ) + + def __load_generic_surface_csv(self, file_path, coeff_name): # pylint: disable=too-many-statements,import-outside-toplevel + """Load GenericSurface coefficient CSV into a 7D Function. + + This loader expects header-based CSV data with one or more independent + variables among: alpha, beta, mach, reynolds, pitch_rate, yaw_rate, + roll_rate. 
+ """ + independent_vars = [ + "alpha", + "beta", + "mach", + "reynolds", + "pitch_rate", + "yaw_rate", + "roll_rate", + ] + + try: + with open(file_path, mode="r") as file: + reader = csv.reader(file) + header = next(reader) + except (FileNotFoundError, IOError) as e: + raise ValueError(f"Error reading {coeff_name} CSV file: {e}") from e + except StopIteration as e: + raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") from e + + if not header: + raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") + + header = [column.strip() for column in header] + present_columns = [col for col in independent_vars if col in header] + + invalid_columns = [col for col in header[:-1] if col not in independent_vars] + if invalid_columns: + raise ValueError( + f"Invalid independent variable(s) in {coeff_name} CSV: " + f"{invalid_columns}. Valid options are: {independent_vars}." + ) + + if header[-1] in independent_vars: + raise ValueError( + f"Last column in {coeff_name} CSV must be the coefficient" + " value, not an independent variable." 
+ ) + + if not present_columns: + raise ValueError(f"No independent variables found in {coeff_name} CSV.") + + ordered_present_columns = [ + col for col in header[:-1] if col in independent_vars + ] + + csv_func = Function.from_regular_grid_csv( + file_path, + ordered_present_columns, + coeff_name, + extrapolation="natural", + ) + if csv_func is None: + csv_func = Function( + file_path, + interpolation="linear", + extrapolation="natural", + ) + + def wrapper(alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate): + args_by_name = { + "alpha": alpha, + "beta": beta, + "mach": mach, + "reynolds": reynolds, + "pitch_rate": pitch_rate, + "yaw_rate": yaw_rate, + "roll_rate": roll_rate, + } + selected_args = [args_by_name[col] for col in ordered_present_columns] + return csv_func(*selected_args) + + return Function( + wrapper, + independent_vars, + [coeff_name], + interpolation="linear", + extrapolation="natural", + ) diff --git a/rocketpy/rocket/rocket.py b/rocketpy/rocket/rocket.py index 0e44365d6..e3692d2e8 100644 --- a/rocketpy/rocket/rocket.py +++ b/rocketpy/rocket/rocket.py @@ -1,3 +1,4 @@ +import csv import inspect import math import warnings @@ -27,7 +28,6 @@ from rocketpy.tools import ( deprecated, find_obj_from_hash, - load_rocket_drag_csv, parallel_axis_theorem_from_com, ) @@ -2243,7 +2243,7 @@ def _count_positional_args(callable_obj): # Case 1: string input can be a CSV path or any Function-supported source. if isinstance(input_data, str): if input_data.lower().endswith(".csv"): - return load_rocket_drag_csv(input_data, coeff_name) + return self.__load_rocket_drag_csv(input_data, coeff_name) function_data = Function(input_data) _validate_function_domain_dimension(function_data) @@ -2319,3 +2319,129 @@ def _count_positional_args(callable_obj): f"Invalid input for {coeff_name}: must be int, float, CSV file path, " "Function, or callable." 
) + + def __load_rocket_drag_csv(self, file_path, coeff_name): # pylint: disable=too-many-statements,import-outside-toplevel + """Load Rocket drag CSV into a 7D Function. + + Supports either headerless two-column (mach, coefficient) tables or + header-based multi-variable CSV tables. + """ + independent_vars = [ + "alpha", + "beta", + "mach", + "reynolds", + "pitch_rate", + "yaw_rate", + "roll_rate", + ] + + def _is_numeric(value): + try: + float(value) + return True + except (TypeError, ValueError): + try: + int(value) + return True + except (TypeError, ValueError): + return False + + try: + with open(file_path, mode="r") as file: + reader = csv.reader(file) + first_row = next(reader) + except (FileNotFoundError, IOError) as e: + raise ValueError(f"Error reading {coeff_name} CSV file: {e}") from e + except StopIteration as e: + raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") from e + + if not first_row: + raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") + + is_headerless_two_column = len(first_row) == 2 and all( + _is_numeric(cell) for cell in first_row + ) + + if is_headerless_two_column: + csv_func = Function( + file_path, + interpolation="linear", + extrapolation="constant", + ) + + def mach_wrapper( + _alpha, + _beta, + mach, + _reynolds, + _pitch_rate, + _yaw_rate, + _roll_rate, + ): + return csv_func(mach) + + return Function( + mach_wrapper, + independent_vars, + [coeff_name], + interpolation="linear", + extrapolation="constant", + ) + + header = [column.strip() for column in first_row] + present_columns = [col for col in independent_vars if col in header] + + invalid_columns = [col for col in header[:-1] if col not in independent_vars] + if invalid_columns: + raise ValueError( + f"Invalid independent variable(s) in {coeff_name} CSV: " + f"{invalid_columns}. Valid options are: {independent_vars}." 
+ ) + + if header[-1] in independent_vars: + raise ValueError( + f"Last column in {coeff_name} CSV must be the coefficient " + "value, not an independent variable." + ) + + if not present_columns: + raise ValueError(f"No independent variables found in {coeff_name} CSV.") + + ordered_present_columns = [ + col for col in header[:-1] if col in independent_vars + ] + + csv_func = Function.from_regular_grid_csv( + file_path, + ordered_present_columns, + coeff_name, + extrapolation="constant", + ) + if csv_func is None: + csv_func = Function( + file_path, + interpolation="linear", + extrapolation="constant", + ) + + def wrapper(alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate): + args_by_name = { + "alpha": alpha, + "beta": beta, + "mach": mach, + "reynolds": reynolds, + "pitch_rate": pitch_rate, + "yaw_rate": yaw_rate, + "roll_rate": roll_rate, + } + selected_args = [args_by_name[col] for col in ordered_present_columns] + return csv_func(*selected_args) + + return Function( + wrapper, + independent_vars, + [coeff_name], + interpolation="linear", + extrapolation="constant", + ) diff --git a/rocketpy/tools.py b/rocketpy/tools.py index 6c8572a47..68ab3404a 100644 --- a/rocketpy/tools.py +++ b/rocketpy/tools.py @@ -7,7 +7,6 @@ """ import base64 -import csv import functools import importlib import importlib.metadata @@ -117,261 +116,6 @@ def tuple_handler(value): raise ValueError("value must be a list or tuple of length 1 or 2.") -def create_regular_grid_function( - csv_source, - variable_names, - coeff_name, - extrapolation, -): # pylint: disable=import-outside-toplevel - """Create a regular-grid Function when CSV samples form a full grid. - - Parameters - ---------- - csv_source : str - Path to the CSV file. - variable_names : list[str] - Ordered independent variable names present in the CSV. - coeff_name : str - Name of the coefficient output. - extrapolation : str - Extrapolation method passed to the Function constructor. 
- - Returns - ------- - Function or None - A ``Function`` configured with ``regular_grid`` interpolation when the - CSV data forms a strict Cartesian grid, otherwise ``None``. - """ - from rocketpy.mathutils.function import ( # pylint: disable=import-outside-toplevel - Function, # pylint: disable=import-outside-toplevel - ) - - return Function.from_regular_grid_csv( - csv_source, - variable_names, - coeff_name, - extrapolation, - ) - - -def load_generic_surface_csv(file_path, coeff_name): # pylint: disable=too-many-statements,import-outside-toplevel - """Load GenericSurface coefficient CSV into a 7D Function. - - This loader expects header-based CSV data with one or more independent - variables among: alpha, beta, mach, reynolds, pitch_rate, yaw_rate, - roll_rate. - """ - from rocketpy.mathutils.function import ( # pylint: disable=import-outside-toplevel - Function, # pylint: disable=import-outside-toplevel - ) - - independent_vars = [ - "alpha", - "beta", - "mach", - "reynolds", - "pitch_rate", - "yaw_rate", - "roll_rate", - ] - - try: - with open(file_path, mode="r") as file: - reader = csv.reader(file) - header = next(reader) - except (FileNotFoundError, IOError) as e: - raise ValueError(f"Error reading {coeff_name} CSV file: {e}") from e - except StopIteration as e: - raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") from e - - if not header: - raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") - - header = [column.strip() for column in header] - present_columns = [col for col in independent_vars if col in header] - - invalid_columns = [col for col in header[:-1] if col not in independent_vars] - if invalid_columns: - raise ValueError( - f"Invalid independent variable(s) in {coeff_name} CSV: " - f"{invalid_columns}. Valid options are: {independent_vars}." - ) - - if header[-1] in independent_vars: - raise ValueError( - f"Last column in {coeff_name} CSV must be the coefficient" - " value, not an independent variable." 
- ) - - if not present_columns: - raise ValueError(f"No independent variables found in {coeff_name} CSV.") - - ordered_present_columns = [col for col in header[:-1] if col in independent_vars] - - csv_func = create_regular_grid_function( - file_path, - ordered_present_columns, - coeff_name, - extrapolation="natural", - ) - if csv_func is None: - csv_func = Function( - file_path, - interpolation="linear", - extrapolation="natural", - ) - - def wrapper(alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate): - args_by_name = { - "alpha": alpha, - "beta": beta, - "mach": mach, - "reynolds": reynolds, - "pitch_rate": pitch_rate, - "yaw_rate": yaw_rate, - "roll_rate": roll_rate, - } - selected_args = [args_by_name[col] for col in ordered_present_columns] - return csv_func(*selected_args) - - return Function( - wrapper, - independent_vars, - [coeff_name], - interpolation="linear", - extrapolation="natural", - ) - - -def load_rocket_drag_csv(file_path, coeff_name): # pylint: disable=too-many-statements,import-outside-toplevel - """Load Rocket drag CSV into a 7D Function. - - Supports either headerless two-column (mach, coefficient) tables or - header-based multi-variable CSV tables. 
- """ - from rocketpy.mathutils.function import ( # pylint: disable=import-outside-toplevel - Function, # pylint: disable=import-outside-toplevel - ) - - independent_vars = [ - "alpha", - "beta", - "mach", - "reynolds", - "pitch_rate", - "yaw_rate", - "roll_rate", - ] - - def _is_numeric(value): - try: - float(value) - return True - except (TypeError, ValueError): - try: - int(value) - return True - except (TypeError, ValueError): - return False - - try: - with open(file_path, mode="r") as file: - reader = csv.reader(file) - first_row = next(reader) - except (FileNotFoundError, IOError) as e: - raise ValueError(f"Error reading {coeff_name} CSV file: {e}") from e - except StopIteration as e: - raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") from e - - if not first_row: - raise ValueError(f"Invalid or empty CSV file for {coeff_name}.") - - is_headerless_two_column = len(first_row) == 2 and all( - _is_numeric(cell) for cell in first_row - ) - - if is_headerless_two_column: - csv_func = Function( - file_path, - interpolation="linear", - extrapolation="constant", - ) - - def mach_wrapper( - _alpha, - _beta, - mach, - _reynolds, - _pitch_rate, - _yaw_rate, - _roll_rate, - ): - return csv_func(mach) - - return Function( - mach_wrapper, - independent_vars, - [coeff_name], - interpolation="linear", - extrapolation="constant", - ) - - header = [column.strip() for column in first_row] - present_columns = [col for col in independent_vars if col in header] - - invalid_columns = [col for col in header[:-1] if col not in independent_vars] - if invalid_columns: - raise ValueError( - f"Invalid independent variable(s) in {coeff_name} CSV: " - f"{invalid_columns}. Valid options are: {independent_vars}." - ) - - if header[-1] in independent_vars: - raise ValueError( - f"Last column in {coeff_name} CSV must be the coefficient " - "value, not an independent variable." 
- ) - - if not present_columns: - raise ValueError(f"No independent variables found in {coeff_name} CSV.") - - ordered_present_columns = [col for col in header[:-1] if col in independent_vars] - - csv_func = create_regular_grid_function( - file_path, - ordered_present_columns, - coeff_name, - extrapolation="constant", - ) - if csv_func is None: - csv_func = Function( - file_path, - interpolation="linear", - extrapolation="constant", - ) - - def wrapper(alpha, beta, mach, reynolds, pitch_rate, yaw_rate, roll_rate): - args_by_name = { - "alpha": alpha, - "beta": beta, - "mach": mach, - "reynolds": reynolds, - "pitch_rate": pitch_rate, - "yaw_rate": yaw_rate, - "roll_rate": roll_rate, - } - selected_args = [args_by_name[col] for col in ordered_present_columns] - return csv_func(*selected_args) - - return Function( - wrapper, - independent_vars, - [coeff_name], - interpolation="linear", - extrapolation="constant", - ) - - def calculate_cubic_hermite_coefficients(x0, x1, y0, yp0, y1, yp1): """Calculate the coefficients of a cubic Hermite interpolation function. The function is defined as ax**3 + bx**2 + cx + d. 
From 1f4f7927b4e957e7308011e230ef3f97393ca31c Mon Sep 17 00:00:00 2001 From: Khushal Kottaru Date: Wed, 25 Mar 2026 03:29:20 -0700 Subject: [PATCH 13/44] BUG: Add wraparound logic for wind direction in environment plots (#939) * chore: added personal toolkit files * update branch name in workflow * chore: update toolkit files * Fix: add wraparound logic for wind direction and related tests * style: fix ruff formatting * Remove unused import Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * refactor: move repetitive logic into helper method * fix: update test logic in test_environment * add changelog entry --------- Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- CHANGELOG.md | 1 + rocketpy/plots/environment_plots.py | 42 +++++++++++++-- .../environment/test_environment.py | 53 +++++++++++++++++++ 3 files changed, 92 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b8d0aee4..f838cc64a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,6 +73,7 @@ Attention: The newest changes should be on top --> ### Fixed +- BUG: Add wraparound logic for wind direction in environment plots [#939](https://github.com/RocketPy-Team/RocketPy/pull/939) - BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) - BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) diff --git a/rocketpy/plots/environment_plots.py b/rocketpy/plots/environment_plots.py index 4b8a91e15..f53cecc1b 100644 --- a/rocketpy/plots/environment_plots.py +++ 
b/rocketpy/plots/environment_plots.py @@ -33,6 +33,30 @@ def __init__(self, environment): self.grid = np.linspace(environment.elevation, environment.max_expected_height) self.environment = environment + def _break_direction_wraparound(self, directions, altitudes): + """Inserts NaN into direction and altitude arrays at 0°/360° wraparound + points so matplotlib does not draw a horizontal line across the plot. + + Parameters + ---------- + directions : numpy.ndarray + Wind direction values in degrees, dtype float. + altitudes : numpy.ndarray + Altitude values corresponding to each direction, dtype float. + + Returns + ------- + directions : numpy.ndarray + Direction array with NaN inserted at wraparound points. + altitudes : numpy.ndarray + Altitude array with NaN inserted at wraparound points. + """ + WRAP_THRESHOLD = 180 # degrees; half the full circle + wrap_indices = np.where(np.abs(np.diff(directions)) > WRAP_THRESHOLD)[0] + 1 + directions = np.insert(directions, wrap_indices, np.nan) + altitudes = np.insert(altitudes, wrap_indices, np.nan) + return directions, altitudes + def __wind(self, ax): """Adds wind speed and wind direction graphs to the same axis. 
@@ -55,9 +79,14 @@ def __wind(self, ax): ax.set_xlabel("Wind Speed (m/s)", color="#ff7f0e") ax.tick_params("x", colors="#ff7f0e") axup = ax.twiny() + directions = np.array( + [self.environment.wind_direction(i) for i in self.grid], dtype=float + ) + altitudes = np.array(self.grid, dtype=float) + directions, altitudes = self._break_direction_wraparound(directions, altitudes) axup.plot( - [self.environment.wind_direction(i) for i in self.grid], - self.grid, + directions, + altitudes, color="#1f77b4", label="Wind Direction", ) @@ -311,9 +340,14 @@ def ensemble_member_comparison(self, *, filename=None): ax8 = plt.subplot(324) for i in range(self.environment.num_ensemble_members): self.environment.select_ensemble_member(i) + dirs = np.array( + [self.environment.wind_direction(j) for j in self.grid], dtype=float + ) + alts = np.array(self.grid, dtype=float) + dirs, alts = self._break_direction_wraparound(dirs, alts) ax8.plot( - [self.environment.wind_direction(i) for i in self.grid], - self.grid, + dirs, + alts, label=i, ) ax8.set_ylabel("Height Above Sea Level (m)") diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index 3bdd5209a..d919c535d 100644 --- a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -92,6 +92,59 @@ def test_standard_atmosphere(mock_show, example_plain_env): # pylint: disable=u assert example_plain_env.prints.print_earth_details() is None +@patch("matplotlib.pyplot.show") +def test_wind_plots_wrapping_direction(mock_show, example_plain_env): # pylint: disable=unused-argument + """Tests that wind direction plots handle 360°→0° wraparound without + drawing a horizontal line across the graph. + + Parameters + ---------- + mock_show : mock + Mock object to replace matplotlib.pyplot.show() method. + example_plain_env : rocketpy.Environment + Example environment object to be tested. 
+ """ + # Set a custom atmosphere where wind direction wraps from ~350° to ~10° + # across the altitude range by choosing wind_u and wind_v to create a + # direction near 350° at low altitude and ~10° at higher altitude. + # wind_direction = (180 + atan2(wind_u, wind_v)) % 360 + # For direction ~350°: need atan2(wind_u, wind_v) ≈ 170° → wind_u>0, wind_v<0 + # For direction ~10°: need atan2(wind_u, wind_v) ≈ -170° → wind_u<0, wind_v<0 + example_plain_env.set_atmospheric_model( + type="custom_atmosphere", + pressure=None, + temperature=300, + wind_u=[(0, 1), (5000, -1)], # changes sign across altitude + wind_v=[(0, -6), (5000, -6)], # stays negative → heading near 350°/10° + ) + # Verify that the wind direction actually wraps through 0°/360° in this + # atmosphere so the test exercises the wraparound code path. + low_dir = example_plain_env.wind_direction(0) + high_dir = example_plain_env.wind_direction(5000) + assert abs(low_dir - high_dir) > 180, ( + "Test setup error: wind direction should cross 0°/360° boundary" + ) + # Verify that the helper inserts NaN breaks into the direction and altitude + # arrays at the wraparound point, which is the core of the fix. 
+ directions = np.array( + [example_plain_env.wind_direction(i) for i in example_plain_env.plots.grid], + dtype=float, + ) + altitudes = np.array(example_plain_env.plots.grid, dtype=float) + directions_broken, altitudes_broken = ( + example_plain_env.plots._break_direction_wraparound(directions, altitudes) + ) + assert np.any(np.isnan(directions_broken)), ( + "Expected NaN breaks in direction array at 0°/360° wraparound" + ) + assert np.any(np.isnan(altitudes_broken)), ( + "Expected NaN breaks in altitude array at 0°/360° wraparound" + ) + # Verify info() and atmospheric_model() plots complete without error + assert example_plain_env.info() is None + assert example_plain_env.plots.atmospheric_model() is None + + @pytest.mark.parametrize( "model_name", [ From d0ce62af0cfa89fa8ed496b1d9a2ee54869a213b Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 27 Mar 2026 19:28:56 -0300 Subject: [PATCH 14/44] MNT: add numpy import to test_environment.py --- tests/integration/environment/test_environment.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index d919c535d..c5e1103bd 100644 --- a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -2,6 +2,7 @@ from datetime import date, datetime, timezone from unittest.mock import patch +import numpy as np import pytest From 9cd2d34541b181d51b84cd4e0bf6e945a13a0af5 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 27 Mar 2026 19:34:28 -0300 Subject: [PATCH 15/44] MNT: rename constant for wraparound threshold in _break_direction_wraparound method --- rocketpy/plots/environment_plots.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketpy/plots/environment_plots.py b/rocketpy/plots/environment_plots.py index f53cecc1b..add5e4efb 100644 --- a/rocketpy/plots/environment_plots.py +++ b/rocketpy/plots/environment_plots.py @@ -51,8 +51,8 @@ def 
_break_direction_wraparound(self, directions, altitudes): altitudes : numpy.ndarray Altitude array with NaN inserted at wraparound points. """ - WRAP_THRESHOLD = 180 # degrees; half the full circle - wrap_indices = np.where(np.abs(np.diff(directions)) > WRAP_THRESHOLD)[0] + 1 + wrap_threshold = 180 # degrees; half the full circle + wrap_indices = np.where(np.abs(np.diff(directions)) > wrap_threshold)[0] + 1 directions = np.insert(directions, wrap_indices, np.nan) altitudes = np.insert(altitudes, wrap_indices, np.nan) return directions, altitudes From e0173e20c5951fd17c38d357af656e516bfb05a9 Mon Sep 17 00:00:00 2001 From: "Mohammed S. Al-Mahrouqi" Date: Sat, 28 Mar 2026 21:26:19 -0400 Subject: [PATCH 16/44] ENH: Adaptive Monte Carlo via Convergence Criteria (#922) * ENH: added a new function (simulate_convergence) * DOC: added a cell to show simulate_convergence function usage * TST: integration test for simulate_convergence * DOC: updated changelog for this PR * ENH: ran black to lint intg test file * new fixes thx to copilot comments * linted rocketpy/simulation/monte_carlo.py --------- Co-authored-by: Malmahrouqi3 --- CHANGELOG.md | 3 +- .../monte_carlo_class_usage.ipynb | 22 ++++++ rocketpy/simulation/monte_carlo.py | 67 +++++++++++++++++++ .../simulation/test_monte_carlo.py | 27 ++++++++ 4 files changed, 118 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f838cc64a..6d3e6e053 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ Attention: The newest changes should be on top --> ### Added -- +- ENH: Adaptive Monte Carlo via Convergence Criteria [#922](https://github.com/RocketPy-Team/RocketPy/pull/922) +- TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914](https://github.com/RocketPy-Team/RocketPy/pull/914) ### Changed diff --git a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb index 2fb46fa86..8181c03ba 
100644 --- a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb +++ b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb @@ -800,6 +800,28 @@ ")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Alternatively, we can target an attribute using the method `MonteCarlo.simulate_convergence()` such that when the tolerance is met, the flight simulations would terminate early." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "test_dispersion.simulate_convergence(\n", + " target_attribute=\"apogee_time\",\n", + " target_confidence=0.95,\n", + " tolerance=0.5, # in seconds\n", + " max_simulations=1000,\n", + " batch_size=50,\n", + ")" + ] + }, { "attachments": {}, "cell_type": "markdown", diff --git a/rocketpy/simulation/monte_carlo.py b/rocketpy/simulation/monte_carlo.py index e10789a7d..42a566b7b 100644 --- a/rocketpy/simulation/monte_carlo.py +++ b/rocketpy/simulation/monte_carlo.py @@ -525,6 +525,73 @@ def estimate_confidence_interval( return res.confidence_interval + def simulate_convergence( + self, + target_attribute="apogee_time", + target_confidence=0.95, + tolerance=0.5, + max_simulations=1000, + batch_size=50, + parallel=False, + n_workers=None, + ): + """Run Monte Carlo simulations in batches until the confidence interval + width converges within the specified tolerance or the maximum number of + simulations is reached. + + Parameters + ---------- + target_attribute : str + The target attribute to track its convergence (e.g., "apogee", "apogee_time", etc.). + target_confidence : float, optional + The confidence level for the interval (between 0 and 1). Default is 0.95. + tolerance : float, optional + The desired width of the confidence interval in seconds, meters, or other units. Default is 0.5. + max_simulations : int, optional + The maximum number of simulations to run to avoid infinite loops. Default is 1000. 
+ batch_size : int, optional + The number of simulations to run in each batch. Default is 50. + parallel : bool, optional + Whether to run simulations in parallel. Default is False. + n_workers : int, optional + The number of worker processes to use if running in parallel. Default is None. + + Returns + ------- + confidence_interval_history : list of float + History of confidence interval widths, one value per batch of simulations. + The last element corresponds to the width when the simulation stopped for + either meeting the tolerance or reaching the maximum number of simulations. + """ + + self.import_outputs(self.filename.with_suffix(".outputs.txt")) + confidence_interval_history = [] + + while self.num_of_loaded_sims < max_simulations: + total_sims = min(self.num_of_loaded_sims + batch_size, max_simulations) + + self.simulate( + number_of_simulations=total_sims, + append=True, + include_function_data=False, + parallel=parallel, + n_workers=n_workers, + ) + + self.import_outputs(self.filename.with_suffix(".outputs.txt")) + + ci = self.estimate_confidence_interval( + attribute=target_attribute, + confidence_level=target_confidence, + ) + + confidence_interval_history.append(float(ci.high - ci.low)) + + if float(ci.high - ci.low) <= tolerance: + break + + return confidence_interval_history + def __evaluate_flight_inputs(self, sim_idx): """Evaluates the inputs of a single flight simulation. diff --git a/tests/integration/simulation/test_monte_carlo.py b/tests/integration/simulation/test_monte_carlo.py index 4b1b82392..98af2431d 100644 --- a/tests/integration/simulation/test_monte_carlo.py +++ b/tests/integration/simulation/test_monte_carlo.py @@ -236,3 +236,30 @@ def invalid_data_collector(flight): monte_carlo_calisto.simulate(number_of_simulations=10, append=False) finally: _post_test_file_cleanup() + + +@pytest.mark.slow +def test_monte_carlo_simulate_convergence(monte_carlo_calisto): + """Tests the simulate_convergence method of the MonteCarlo class. 
+ + Parameters + ---------- + monte_carlo_calisto : MonteCarlo + The MonteCarlo object, this is a pytest fixture. + """ + try: + ci_history = monte_carlo_calisto.simulate_convergence( + target_attribute="apogee", + target_confidence=0.95, + tolerance=5.0, + max_simulations=20, + batch_size=5, + parallel=False, + ) + + assert isinstance(ci_history, list) + assert all(isinstance(width, float) for width in ci_history) + assert len(ci_history) >= 1 + assert monte_carlo_calisto.num_of_loaded_sims <= 20 + finally: + _post_test_file_cleanup() From 5412b34089bc4e6b15bcf4e30734232152179d03 Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:11:03 -0300 Subject: [PATCH 17/44] DOC: add latitude range in docs Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index fb0179c9e..9081ff42a 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -115,7 +115,7 @@ def geodesic_to_lambert_conformal(lat, lon, projection_variable, x_units="m"): Parameters ---------- lat : float - Latitude in degrees. + Latitude in degrees, ranging from -90 to 90 lon : float Longitude in degrees, ranging from -180 to 180. 
projection_variable : netCDF4.Variable From 036e22e9d8b2217c010af5d83b618090d538bddb Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:12:35 -0300 Subject: [PATCH 18/44] MNT: remove unnecessary pylint warning Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 9081ff42a..6514de0b9 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -225,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements +def find_longitude_index(longitude, lon_list): """Finds the index of the given longitude in a list of longitudes. Parameters From 1dc13725345a16000d1903b72c024ef2918f1fc9 Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:12:55 -0300 Subject: [PATCH 19/44] MNT: remove unnecessary pylint warning Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 6514de0b9..5335acb36 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -344,7 +344,7 @@ def _coord_value(source, index): return latitude, lat_index -def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements +def find_time_index(datetime_date, time_array): """Finds the index of the given datetime in a netCDF4 time array. 
Parameters From 264eb4f2b43ff8cd33285b7d5abd2ab8bc36cd27 Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:17:08 -0300 Subject: [PATCH 20/44] DOC: correctly link to WeatherModelMapping Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- rocketpy/environment/environment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 39441ecae..6e69b343f 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -2032,8 +2032,8 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals See also -------- - See the :class:``rocketpy.environment.weather_model_mapping`` for some - dictionary examples. + See the :class:`rocketpy.environment.weather_model_mapping.WeatherModelMapping` + class for some dictionary examples. Raises ------ From def001c68bed52e021b69eabd1e17724afcb508f Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:22:31 -0300 Subject: [PATCH 21/44] DOCS: checked todo Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- rocketpy/environment/environment.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 6e69b343f..e91a62428 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -2058,8 +2058,10 @@ class for some dictionary examples. lon_array = data.variables[dictionary["longitude"]] lat_array = data.variables[dictionary["latitude"]] - # Some THREDDS datasets use projected x/y coordinates. - # TODO CHECK THIS I AM NOT SURE????? + # Some THREDDS datasets use projected x/y coordinates. 
When a + # "projection" variable is provided in the mapping dictionary, convert + # the launch site's geodesic coordinates to the model's projected + # coordinate system before locating the nearest grid cell. if dictionary.get("projection") is not None: projection_variable = data.variables[dictionary["projection"]] x_units = getattr(lon_array, "units", "m") From 1b5d9feb0d0be494e01493b71d73ba62b66bb04e Mon Sep 17 00:00:00 2001 From: MateusStano Date: Tue, 31 Mar 2026 20:38:51 -0300 Subject: [PATCH 22/44] ENH: address copilot comments --- rocketpy/environment/environment.py | 15 ++-- rocketpy/environment/fetchers.py | 84 ++----------------- rocketpy/environment/weather_model_mapping.py | 28 +++---- 3 files changed, 28 insertions(+), 99 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 39441ecae..6479fa011 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -178,14 +178,14 @@ class Environment: ``Ensemble``. Environment.lat_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. 2x2 matrix for each pressure level of - latitudes corresponding to the vertices of the grid cell which - surrounds the launch site. + Reanalysis and Ensembles. Two-element list ``[x1, x2]`` containing + the latitude coordinates of the grid-cell vertices that bracket the + launch site and are used in bilinear interpolation. Environment.lon_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. 2x2 matrix for each pressure level of - longitudes corresponding to the vertices of the grid cell which - surrounds the launch site. + Reanalysis and Ensembles. Two-element list ``[y1, y2]`` containing + the longitude coordinates of the grid-cell vertices that bracket the + launch site and are used in bilinear interpolation. 
Environment.lon_index : int Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, Reanalysis and Ensembles. Index to a grid longitude which @@ -223,7 +223,8 @@ class Environment: surrounds the launch site. Environment.time_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. Array of dates available in the file. + Reanalysis and Ensembles. Two-element list with the first and last + values from the dataset time variable in the dataset native units. Environment.height : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, Reanalysis and Ensembles. List of geometric height corresponding to diff --git a/rocketpy/environment/fetchers.py b/rocketpy/environment/fetchers.py index 589159f1c..5cf03add8 100644 --- a/rocketpy/environment/fetchers.py +++ b/rocketpy/environment/fetchers.py @@ -93,8 +93,8 @@ def fetch_atmospheric_data_from_windy(lat, lon, model): def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest GFS (Global Forecast System) dataset from the NOAA's - GrADS data server using the OpenDAP protocol. + """Fetches the latest GFS (Global Forecast System) dataset from the UCAR + THREDDS data server using the OPeNDAP protocol. Parameters ---------- @@ -128,8 +128,8 @@ def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest NAM (North American Mesoscale) dataset from the NOAA's - GrADS data server using the OpenDAP protocol. + """Fetches the latest NAM (North American Mesoscale) dataset from the UCAR + THREDDS data server using the OPeNDAP protocol. Parameters ---------- @@ -161,8 +161,8 @@ def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest RAP (Rapid Refresh) dataset from the NOAA's GrADS data - server using the OpenDAP protocol. 
+ """Fetches the latest RAP (Rapid Refresh) dataset from the UCAR THREDDS + data server using the OPeNDAP protocol. Parameters ---------- @@ -193,78 +193,6 @@ def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): raise RuntimeError("Unable to load latest weather data for RAP through " + file_url) -def fetch_hrrr_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest HRRR (High-Resolution Rapid Refresh) dataset from - the NOAA's GrADS data server using the OpenDAP protocol. - - Parameters - ---------- - max_attempts : int, optional - The maximum number of attempts to fetch the dataset. Default is 10. - base_delay : int, optional - The base delay in seconds between attempts. Default is 2. - - Returns - ------- - netCDF4.Dataset - The HRRR dataset. - - Raises - ------ - RuntimeError - If unable to load the latest weather data for HRRR. - """ - file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/HRRR/CONUS_2p5km/Best" - attempt_count = 0 - while attempt_count < max_attempts: - try: - return netCDF4.Dataset(file_url) - except OSError: - attempt_count += 1 - time.sleep(base_delay**attempt_count) - - raise RuntimeError( - "Unable to load latest weather data for HRRR through " + file_url - ) - - -def fetch_aigfs_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest AIGFS (Artificial Intelligence GFS) dataset from - the NOAA's GrADS data server using the OpenDAP protocol. - - Parameters - ---------- - max_attempts : int, optional - The maximum number of attempts to fetch the dataset. Default is 10. - base_delay : int, optional - The base delay in seconds between attempts. Default is 2. - - Returns - ------- - netCDF4.Dataset - The AIGFS dataset. - - Raises - ------ - RuntimeError - If unable to load the latest weather data for AIGFS. 
- """ - file_url = ( - "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/AIGFS/Global_0p25deg/Best" - ) - attempt_count = 0 - while attempt_count < max_attempts: - try: - return netCDF4.Dataset(file_url) - except OSError: - attempt_count += 1 - time.sleep(base_delay**attempt_count) - - raise RuntimeError( - "Unable to load latest weather data for AIGFS through " + file_url - ) - - def fetch_hiresw_file_return_dataset(max_attempts=10, base_delay=2): """Fetches the latest HiResW (High-Resolution Window) dataset from the NOAA's GrADS data server using the OpenDAP protocol. diff --git a/rocketpy/environment/weather_model_mapping.py b/rocketpy/environment/weather_model_mapping.py index c490fad9d..b054a35c4 100644 --- a/rocketpy/environment/weather_model_mapping.py +++ b/rocketpy/environment/weather_model_mapping.py @@ -7,21 +7,21 @@ class WeatherModelMapping: Each dictionary in this class maps those canonical keys to the actual variable names in a specific data provider format. - Mapping families - ---------------- - - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current - default mappings used by the latest-model shortcuts and THREDDS-style - datasets. - - ``*_LEGACY`` names represent older NOMADS-style variable naming - conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and - ``vgrdprs``) and are intended for archived or previously downloaded files. + Mapping families + ---------------- + - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current + default mappings used by the latest-model shortcuts and THREDDS-style + datasets. + - ``*_LEGACY`` names represent older NOMADS-style variable naming + conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and + ``vgrdprs``) and are intended for archived or previously downloaded files. - Notes - ----- - - Mappings can also include optional keys such as ``projection`` for - projected grids and ``ensemble`` for member dimensions. 
- - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and - ``"GFS_LEGACY"`` are equivalent. + Notes + ----- + - Mappings can also include optional keys such as ``projection`` for + projected grids and ``ensemble`` for member dimensions. + - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and + ``"GFS_LEGACY"`` are equivalent. """ GFS = { From ee2663b74510c5f07ae5fa9bc932a0e642284eed Mon Sep 17 00:00:00 2001 From: MateusStano Date: Tue, 31 Mar 2026 21:41:13 -0300 Subject: [PATCH 23/44] TST: improve tests --- rocketpy/environment/tools.py | 4 +- tests/unit/environment/test_environment.py | 231 +++++++++++++++++++++ tests/unit/environment/test_fetchers.py | 83 ++++++++ 3 files changed, 316 insertions(+), 2 deletions(-) create mode 100644 tests/unit/environment/test_fetchers.py diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 5335acb36..9081ff42a 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -225,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): +def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements """Finds the index of the given longitude in a list of longitudes. Parameters @@ -344,7 +344,7 @@ def _coord_value(source, index): return latitude, lat_index -def find_time_index(datetime_date, time_array): +def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements """Finds the index of the given datetime in a netCDF4 time array. 
Parameters diff --git a/tests/unit/environment/test_environment.py b/tests/unit/environment/test_environment.py index 6d04c089f..eb196e591 100644 --- a/tests/unit/environment/test_environment.py +++ b/tests/unit/environment/test_environment.py @@ -311,3 +311,234 @@ def test_weather_model_mapping_exposes_legacy_aliases(): assert mapping.get("GFS_LEGACY")["temperature"] == "tmpprs" assert mapping.get("gfs_legacy")["temperature"] == "tmpprs" + + +def test_dictionary_matches_dataset_rejects_missing_projection(example_plain_env): + """Reject mapping when projection key is declared but variable is missing.""" + # Arrange + mapping = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + dataset = _DummyDataset( + [ + "time", + "y", + "x", + "isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + # Act + is_compatible = example_plain_env._Environment__dictionary_matches_dataset( + mapping, dataset + ) + + # Assert + assert not is_compatible + + +def test_dictionary_matches_dataset_accepts_geopotential_only(example_plain_env): + """Accept mapping when geopotential exists and geopotential height is absent.""" + # Arrange + mapping = { + "time": "time", + "latitude": "latitude", + "longitude": "longitude", + "level": "level", + "temperature": "t", + "geopotential_height": None, + "geopotential": "z", + "u_wind": "u", + "v_wind": "v", + } + dataset = _DummyDataset( + [ + "time", + "latitude", + "longitude", + "level", + "t", + "z", + "u", + "v", + ] + ) + + # Act + is_compatible = example_plain_env._Environment__dictionary_matches_dataset( + mapping, dataset + ) + + # Assert + assert is_compatible 
+ + +def test_resolve_dictionary_warns_when_falling_back(example_plain_env): + """Emit warning and return a built-in mapping when fallback is required.""" + # Arrange + incompatible_mapping = { + "time": "bad_time", + "latitude": "bad_lat", + "longitude": "bad_lon", + "level": "bad_level", + "temperature": "bad_temp", + "geopotential_height": "bad_height", + "geopotential": None, + "u_wind": "bad_u", + "v_wind": "bad_v", + } + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + "isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + # Act + with pytest.warns(UserWarning, match="Falling back to built-in mapping"): + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + incompatible_mapping, dataset + ) + + # Assert + assert resolved == example_plain_env._Environment__weather_model_map.get("GFS") + + +def test_resolve_dictionary_returns_original_when_no_compatible_builtin( + example_plain_env, +): + """Return original mapping unchanged when no built-in mapping can match.""" + # Arrange + original_mapping = { + "time": "a", + "latitude": "b", + "longitude": "c", + "level": "d", + "temperature": "e", + "geopotential_height": "f", + "geopotential": None, + "u_wind": "g", + "v_wind": "h", + } + dataset = _DummyDataset(["foo", "bar"]) + + # Act + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + original_mapping, dataset + ) + + # Assert + assert resolved is original_mapping + + +@pytest.mark.parametrize( + "model_type,file_name,error_message", + [ + ( + "Forecast", + "hiresw", + "HIRESW latest-model shortcut is currently unavailable", + ), + ( + "Ensemble", + "gefs", + "GEFS latest-model shortcut is currently unavailable", + ), + ], +) +def test_set_atmospheric_model_blocks_deactivated_shortcuts_case_insensitive( + example_plain_env, + model_type, + file_name, + error_message, +): + """Reject deactivated shortcut aliases 
regardless of input string case.""" + # Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(ValueError, match=error_message): + environment.set_atmospheric_model(type=model_type, file=file_name) + + +def test_validate_dictionary_uses_case_insensitive_file_shortcut(example_plain_env): + """Infer built-in mapping from file shortcut even when shortcut is lowercase.""" + # Arrange + environment = example_plain_env + + # Act + mapping = environment._Environment__validate_dictionary("gfs", None) + + # Assert + assert mapping == environment._Environment__weather_model_map.get("GFS") + + +def test_validate_dictionary_raises_type_error_for_invalid_dictionary( + example_plain_env, +): + """Raise TypeError when no valid dictionary can be inferred.""" + # Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(TypeError, match="Please specify a dictionary"): + environment._Environment__validate_dictionary("not_a_model", None) + + +def test_set_atmospheric_model_normalizes_shortcut_case_for_forecast(example_plain_env): + """Normalize shortcut name before lookup and process forecast data.""" + # Arrange + environment = example_plain_env + + environment._Environment__atm_type_file_to_function_map = { + "forecast": { + "GFS": lambda: "fake-dataset", + }, + "ensemble": {}, + } + + called_arguments = {} + + def fake_process_forecast_reanalysis(dataset, dictionary): + called_arguments["dataset"] = dataset + called_arguments["dictionary"] = dictionary + + environment.process_forecast_reanalysis = fake_process_forecast_reanalysis + + # Act + environment.set_atmospheric_model(type="Forecast", file="gfs") + + # Assert + assert called_arguments["dataset"] == "fake-dataset" + assert called_arguments[ + "dictionary" + ] == environment._Environment__weather_model_map.get("GFS") + + +def test_set_atmospheric_model_raises_for_unknown_model_type(example_plain_env): + """Raise ValueError for unknown atmospheric model selector.""" + # 
Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(ValueError, match="Unknown model type"): + environment.set_atmospheric_model(type="unknown_type") diff --git a/tests/unit/environment/test_fetchers.py b/tests/unit/environment/test_fetchers.py new file mode 100644 index 000000000..eea06f977 --- /dev/null +++ b/tests/unit/environment/test_fetchers.py @@ -0,0 +1,83 @@ +import pytest + +from rocketpy.environment import fetchers + + +@pytest.mark.parametrize( + "fetcher,expected_url", + [ + ( + fetchers.fetch_gfs_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg/Best", + ), + ( + fetchers.fetch_nam_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/NAM/CONUS_12km/Best", + ), + ( + fetchers.fetch_rap_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/RAP/CONUS_13km/Best", + ), + ], +) +def test_fetcher_returns_dataset_on_first_attempt(fetcher, expected_url, monkeypatch): + """Return dataset immediately when the first OPeNDAP attempt succeeds.""" + # Arrange + calls = [] + sentinel_dataset = object() + + def fake_dataset(url): + calls.append(url) + return sentinel_dataset + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", fake_dataset) + + # Act + dataset = fetcher(max_attempts=3, base_delay=2) + + # Assert + assert dataset is sentinel_dataset + assert calls == [expected_url] + + +def test_fetch_gfs_retries_then_succeeds(monkeypatch): + """Retry GFS fetch after OSError and return data once endpoint responds.""" + # Arrange + attempt_counter = {"count": 0} + sleep_calls = [] + + def fake_dataset(_): + attempt_counter["count"] += 1 + if attempt_counter["count"] < 3: + raise OSError("temporary failure") + return "gfs-dataset" + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", fake_dataset) + monkeypatch.setattr(fetchers.time, "sleep", sleep_calls.append) + + # Act + dataset = fetchers.fetch_gfs_file_return_dataset(max_attempts=3, base_delay=2) + + # 
Assert + assert dataset == "gfs-dataset" + assert sleep_calls == [2, 4] + + +def test_fetch_rap_raises_runtime_error_after_max_attempts(monkeypatch): + """Raise RuntimeError when all RAP attempts fail with OSError.""" + # Arrange + sleep_calls = [] + + def always_fails(_): + raise OSError("endpoint down") + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", always_fails) + monkeypatch.setattr(fetchers.time, "sleep", sleep_calls.append) + + # Act / Assert + with pytest.raises( + RuntimeError, match="Unable to load latest weather data for RAP" + ): + fetchers.fetch_rap_file_return_dataset(max_attempts=2, base_delay=2) + + assert sleep_calls == [2, 4] From f8811c9f533916942a33773b165bd605934fe886 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Wed, 18 Mar 2026 22:28:37 -0300 Subject: [PATCH 24/44] ENH: get changes from BUG: All NOAA NOMADS Dependent Atmosphere Models Broken Fixes #933 --- CHANGELOG.md | 1 + .../environment/1-atm-models/ensemble.rst | 34 +- .../environment/1-atm-models/forecast.rst | 40 +- .../environment/1-atm-models/soundings.rst | 25 +- .../1-atm-models/standard_atmosphere.rst | 4 +- .../user/environment/3-further/other_apis.rst | 96 +++- rocketpy/environment/environment.py | 504 ++++++++++-------- rocketpy/environment/fetchers.py | 131 +++-- rocketpy/environment/tools.py | 216 ++++++-- rocketpy/environment/weather_model_mapping.py | 169 +++++- .../environment/test_environment.py | 23 +- tests/unit/environment/test_environment.py | 68 +++ 12 files changed, 919 insertions(+), 392 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4faa0db04..8fe5bd03f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,6 +73,7 @@ Attention: The newest changes should be on top --> - BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) - BUG: Add explicit timeouts to ThrustCurve API requests 
[#935](https://github.com/RocketPy-Team/RocketPy/pull/935) +- BUG: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] diff --git a/docs/user/environment/1-atm-models/ensemble.rst b/docs/user/environment/1-atm-models/ensemble.rst index 97c247f68..504cbfe60 100644 --- a/docs/user/environment/1-atm-models/ensemble.rst +++ b/docs/user/environment/1-atm-models/ensemble.rst @@ -1,3 +1,5 @@ +.. _ensemble_atmosphere: + Ensemble ======== @@ -21,7 +23,21 @@ Ensemble Forecast Global Ensemble Forecast System (GEFS) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The ``GEFS`` model is a global ensemble forecast model ... +.. danger:: + + **GEFS shortcut unavailable**: ``file="GEFS"`` is currently disabled in + RocketPy because NOMADS OPeNDAP is deactivated for this endpoint. + +.. note:: + + If you have a GEFS-compatible NetCDF or OPeNDAP dataset from another + provider (or a local copy), you can still load it explicitly by passing the + dataset path/URL in ``file`` and a compatible mapping in ``dictionary``. + + +The ``GEFS`` model is a global ensemble forecast system useful for uncertainty +analysis, but RocketPy's automatic ``file="GEFS"`` shortcut is temporarily +disabled. .. code-block:: python @@ -71,20 +87,16 @@ CMC Ensemble resulted in a change of the model's endpoint. Efforts are underway to \ restore access to the CMC Ensemble model as swiftly as possible. -.. code-block:: python +At the moment, there is no built-in ``file="CMC"`` shortcut in +``Environment.set_atmospheric_model``. 
- env_cmc = Environment( - date=date_info, - latitude=-21.960641, - longitude=-47.482122, - elevation=640, - ) - env_cmc.set_atmospheric_model(type="Ensemble", file="CMC") - env_cmc.all_info() +If you have a CMC-compatible NetCDF or OPeNDAP dataset, load it explicitly by +passing the dataset path/URL in ``file`` and a matching mapping dictionary in +``dictionary``. Ensemble Reanalysis ------------------- Ensemble reanalyses are also possible with RocketPy. See the -:ref:`reanalysis_ensemble` section for more information. +:ref:`reanalysis_ensemble` section for more information. \ No newline at end of file diff --git a/docs/user/environment/1-atm-models/forecast.rst b/docs/user/environment/1-atm-models/forecast.rst index c88c71ff2..ac91504e0 100644 --- a/docs/user/environment/1-atm-models/forecast.rst +++ b/docs/user/environment/1-atm-models/forecast.rst @@ -24,7 +24,7 @@ Global Forecast System (GFS) Using the latest forecast from GFS is simple. Set the atmospheric model to ``forecast`` and specify that GFS is the file you want. -Note that since data is downloaded from the NOMADS server, this line of code can +Note that since data is downloaded from a remote OPeNDAP server, this line of code can take longer than usual. .. jupyter-execute:: @@ -111,36 +111,15 @@ The same coordinates for SpacePort America will be used. High Resolution Window (HIRESW) ------------------------------- -The High Resolution Window (HIRESW) model is a sophisticated weather forecasting -system that operates at a high spatial resolution of approximately 3 km. -It utilizes two main dynamical cores: the Advanced Research WRF (WRF-ARW) and -the Finite Volume Cubed Sphere (FV3), each designed to enhance the accuracy of -weather predictions. +.. danger:: -You can easily set up HIRESW in RocketPy by specifying the date, latitude, and -longitude of your location. Let's use SpacePort America as an example. 
+ **HIRESW shortcut unavailable**: ``file="HIRESW"`` is currently disabled in + RocketPy because NOMADS OPeNDAP is deactivated for this endpoint. -.. jupyter-execute:: - - env_hiresw = Environment( - date=tomorrow, - latitude=32.988528, - longitude=-106.975056, - ) +If you have a HIRESW-compatible dataset from another provider (or a local copy), +you can still load it explicitly by passing the path/URL in ``file`` and an +appropriate mapping in ``dictionary``. - env_hiresw.set_atmospheric_model( - type="Forecast", - file="HIRESW", - dictionary="HIRESW", - ) - - env_hiresw.plots.atmospheric_model() - -.. note:: - - The HRES model is updated every 12 hours, providing forecasts with a \ - resolution of 3 km. The model can predict weather conditions up to 48 hours \ - in advance. RocketPy uses the CONUS domain with ARW core. Using Windy Atmosphere @@ -248,6 +227,5 @@ Also, the servers may be down or may face high traffic. .. seealso:: - To see a complete list of available models on the NOAA's NOMADS server, visit - `NOMADS `_. - + To browse available NCEP model collections on UCAR THREDDS, visit + `THREDDS NCEP Catalog `_. \ No newline at end of file diff --git a/docs/user/environment/1-atm-models/soundings.rst b/docs/user/environment/1-atm-models/soundings.rst index 9a276477e..279750df5 100644 --- a/docs/user/environment/1-atm-models/soundings.rst +++ b/docs/user/environment/1-atm-models/soundings.rst @@ -57,31 +57,22 @@ This service allows users to download virtual soundings from numerical weather prediction models such as GFS, RAP, and NAM, and also real soundings from the Integrated Global Radiosonde Archive (IGRA). -These options can be retrieved as a text file in GSD format. -By generating such a file through the link above, the file's URL can be used to -import the atmospheric data into RocketPy. - -We will use the same sounding station as we did for the Wyoming Soundings. +These options can be retrieved as a text file in GSD format. 
However, +RocketPy no longer provides a dedicated ``set_atmospheric_model`` type for +NOAA RUC Soundings. .. note:: Select ROABs as the initial data source, specify the station through its \ WMO-ID, and opt for the ASCII (GSD format) button. -Initialize a new Environment instance: - -.. code-block:: python +If you need to use RUC-sounding-like data in RocketPy, convert it to one of the +supported workflows: - url = r"https://rucsoundings.noaa.gov/get_raobs.cgi?data_source=RAOB&latest=latest&start_year=2019&start_month_name=Feb&start_mday=5&start_hour=12&start_min=0&n_hrs=1.0&fcst_len=shortest&airport=83779&text=Ascii%20text%20%28GSD%20format%29&hydrometeors=false&start=latest" - - env = Environment() - env.set_atmospheric_model(type="NOAARucSounding", file=url) - env.plots.atmospheric_model() +- Use :ref:`custom_atmosphere` after parsing the text data. +- Use :ref:`reanalysis` or :ref:`forecast` with NetCDF/OPeNDAP sources. .. note:: The leading `r` in the URL string is used to indicate a raw string, which \ - is useful when dealing with backslashes in URLs. - - - + is useful when dealing with backslashes in URLs. \ No newline at end of file diff --git a/docs/user/environment/1-atm-models/standard_atmosphere.rst b/docs/user/environment/1-atm-models/standard_atmosphere.rst index 0c125dfd8..d6c1de782 100644 --- a/docs/user/environment/1-atm-models/standard_atmosphere.rst +++ b/docs/user/environment/1-atm-models/standard_atmosphere.rst @@ -1,3 +1,5 @@ +.. _standard_atmosphere: + Standard Atmosphere =================== @@ -29,4 +31,4 @@ The International Standard Atmosphere can also be reset at any time by using the .. 
jupyter-execute:: - env.set_atmospheric_model(type="standard_atmosphere") + env.set_atmospheric_model(type="standard_atmosphere") \ No newline at end of file diff --git a/docs/user/environment/3-further/other_apis.rst b/docs/user/environment/3-further/other_apis.rst index c70fd58f7..01d4b9a30 100644 --- a/docs/user/environment/3-further/other_apis.rst +++ b/docs/user/environment/3-further/other_apis.rst @@ -1,3 +1,5 @@ +.. _environment_other_apis: + Connecting to other APIs ======================== @@ -25,14 +27,19 @@ the following dimensions and variables: - Latitude - Longitude - Pressure Levels +- Temperature (as a function of Time, Pressure Levels, Latitude and Longitude) - Geopotential Height (as a function of Time, Pressure Levels, Latitude and Longitude) +- or Geopotential (as a function of Time, Pressure Levels, Latitude and Longitude) - Surface Geopotential Height (as a function of Time, Latitude and Longitude) + (optional) - Wind - U Component (as a function of Time, Pressure Levels, Latitude and Longitude) - Wind - V Component (as a function of Time, Pressure Levels, Latitude and Longitude) +Some projected grids also require a ``projection`` key in the mapping. + -For example, let's imagine we want to use the HIRESW model from this endpoint: -`https://nomads.ncep.noaa.gov/dods/hiresw/ `_ +For example, let's imagine we want to use a forecast model available via an +OPeNDAP endpoint. Looking through the variable list in the link above, we find the following correspondence: @@ -72,15 +79,85 @@ Therefore, we can create an environment like this: dictionary=name_mapping, ) +Built-in mapping dictionaries +----------------------------- + +Instead of a custom dictionary, you can pass a built-in mapping name in the +``dictionary`` argument. 
Common options include: + +- ``"ECMWF"`` +- ``"ECMWF_v0"`` +- ``"NOAA"`` +- ``"GFS"`` +- ``"NAM"`` +- ``"RAP"`` +- ``"HIRESW"`` (mapping available; latest-model shortcut currently disabled) +- ``"GEFS"`` (mapping available; latest-model shortcut currently disabled) +- ``"MERRA2"`` +- ``"CMC"`` (for compatible datasets loaded explicitly) + +What a mapping name means +^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Base mapping names (for example ``"GFS"``, ``"NAM"`` and ``"RAP"``) map + RocketPy weather keys to the current default variable naming used by the + corresponding provider datasets. +- These defaults are aligned with current shortcut workflows (for example, + THREDDS-backed latest model sources) and may use projected coordinates + (``x``/``y`` plus ``projection``) depending on the model. + +Legacy mapping names +^^^^^^^^^^^^^^^^^^^^ + +If you are loading archived or older NOMADS-style datasets, use the explicit +legacy aliases: + +- ``"GFS_LEGACY"`` +- ``"NAM_LEGACY"`` +- ``"NOAA_LEGACY"`` +- ``"RAP_LEGACY"`` +- ``"CMC_LEGACY"`` +- ``"GEFS_LEGACY"`` +- ``"HIRESW_LEGACY"`` +- ``"MERRA2_LEGACY"`` + +Legacy aliases primarily cover older variable naming patterns such as +``lev``, ``tmpprs``, ``hgtprs``, ``ugrdprs`` and ``vgrdprs``. + +.. note:: + + Mapping names are case-insensitive. For example, + ``"gfs_legacy"`` and ``"GFS_LEGACY"`` are equivalent. + +For custom dictionaries, the canonical structure is: + +.. code-block:: python + + mapping = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "tmpprs", + "surface_geopotential_height": "hgtsfc", # optional + "geopotential_height": "hgtprs", # or geopotential + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } + +.. important:: + + Ensemble datasets require an additional key for member selection: + ``"ensemble": ""``. + .. caution:: - Notice the ``file`` argument were suppressed in the code above. 
This is because \ - the URL depends on the date you are running the simulation. For example, as \ - it for now, a possible link could be: https://nomads.ncep.noaa.gov/dods/hiresw/hiresw20240803/hiresw_conusfv3_12z \ - (for the 3rd of August, 2024, at 12:00 UTC). \ - You should replace the date in the URL with the date you are running the simulation. \ - Different models may have different URL structures, so be sure to check the \ - documentation of the model you are using. + The ``file`` argument was intentionally omitted in the example above. This is + because the URL depends on the provider, dataset, and date you are running + the simulation. Build the endpoint according to the provider specification + and always validate that the target service is active before running your + simulation workflow. Without OPeNDAP protocol @@ -94,4 +171,3 @@ Environment class, for example: - `Meteomatics `_: `#545 `_ - `Open-Meteo `_: `#520 `_ - diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 6743b06ae..39441ecae 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -27,6 +27,7 @@ find_latitude_index, find_longitude_index, find_time_index, + geodesic_to_lambert_conformal, geodesic_to_utm, get_elevation_data_from_dataset, get_final_date_from_time_array, @@ -138,15 +139,15 @@ class Environment: Environment.atmospheric_model_type : string Describes the atmospheric model which is being used. Can only assume the following values: ``standard_atmosphere``, ``custom_atmosphere``, - ``wyoming_sounding``, ``Forecast``, ``Reanalysis``, - ``Ensemble``. + ``wyoming_sounding``, ``windy``, ``forecast``, ``reanalysis``, + ``ensemble``. Environment.atmospheric_model_file : string Address of the file used for the atmospheric model being used. 
Only - defined for ``wyoming_sounding``, ``Forecast``, - ``Reanalysis``, ``Ensemble`` + defined for ``wyoming_sounding``, ``windy``, ``forecast``, + ``reanalysis``, ``ensemble`` Environment.atmospheric_model_dict : dictionary Dictionary used to properly interpret ``netCDF`` and ``OPeNDAP`` files. - Only defined for ``Forecast``, ``Reanalysis``, ``Ensemble``. + Only defined for ``forecast``, ``reanalysis``, ``ensemble``. Environment.atmospheric_model_init_date : datetime Datetime object instance of first available date in ``netCDF`` and ``OPeNDAP`` files when using ``Forecast``, ``Reanalysis`` or @@ -295,21 +296,21 @@ def __init__( - :attr:`Environment.datetime_date`: UTC time of launch. - Must be given if a Forecast, Reanalysis - or Ensemble, will be set as an atmospheric model. + Must be given if a ``windy``, ``forecast``, ``reanalysis`` + or ``ensemble`` atmospheric model will be used. Default is None. See :meth:`Environment.set_date` for more information. latitude : float, optional Latitude in degrees (ranging from -90 to 90) of rocket - launch location. Must be given if a Forecast, Reanalysis - or Ensemble will be used as an atmospheric model or if + launch location. Must be given if a ``windy``, ``forecast``, + ``reanalysis`` or ``ensemble`` atmospheric model will be used or if Open-Elevation will be used to compute elevation. Positive values correspond to the North. Default value is 0, which corresponds to the equator. longitude : float, optional Longitude in degrees (ranging from -180 to 180) of rocket - launch location. Must be given if a Forecast, Reanalysis - or Ensemble will be used as an atmospheric model or if + launch location. Must be given if a ``windy``, ``forecast``, + ``reanalysis`` or ``ensemble`` atmospheric model will be used or if Open-Elevation will be used to compute elevation. Positive values correspond to the East. Default value is 0, which corresponds to the Greenwich Meridian. 
@@ -605,13 +606,81 @@ def __set_earth_rotation_vector(self): # Validators (used to verify an attribute is being set correctly.) + @staticmethod + def __dictionary_matches_dataset(dictionary, dataset): + """Check whether a mapping dictionary is compatible with a dataset.""" + variables = dataset.variables + required_keys = ( + "time", + "latitude", + "longitude", + "level", + "temperature", + "u_wind", + "v_wind", + ) + + for key in required_keys: + variable_name = dictionary.get(key) + if variable_name is None or variable_name not in variables: + return False + + projection_name = dictionary.get("projection") + if projection_name is not None and projection_name not in variables: + return False + + geopotential_height_name = dictionary.get("geopotential_height") + geopotential_name = dictionary.get("geopotential") + has_geopotential_height = ( + geopotential_height_name is not None + and geopotential_height_name in variables + ) + has_geopotential = ( + geopotential_name is not None and geopotential_name in variables + ) + + return has_geopotential_height or has_geopotential + + def __resolve_dictionary_for_dataset(self, dictionary, dataset): + """Resolve a compatible mapping dictionary for the loaded dataset. + + If the provided mapping is incompatible with the dataset variables, + this method tries built-in mappings and falls back to the first + compatible one. + """ + if self.__dictionary_matches_dataset(dictionary, dataset): + return dictionary + + for model_name, candidate in self.__weather_model_map.all_dictionaries.items(): + if self.__dictionary_matches_dataset(candidate, dataset): + warnings.warn( + "Provided weather mapping does not match dataset variables. " + f"Falling back to built-in mapping '{model_name}'." + ) + return candidate + + return dictionary + def __validate_dictionary(self, file, dictionary): # removed CMC until it is fixed. 
- available_models = ["GFS", "NAM", "RAP", "HIRESW", "GEFS", "ERA5", "MERRA2"] + available_models = [ + "GFS", + "NAM", + "RAP", + "HIRESW", + "GEFS", + "ERA5", + "MERRA2", + ] if isinstance(dictionary, str): dictionary = self.__weather_model_map.get(dictionary) - elif file in available_models: - dictionary = self.__weather_model_map.get(file) + elif isinstance(file, str): + matching_model = next( + (model for model in available_models if model.lower() == file.lower()), + None, + ) + if matching_model is not None: + dictionary = self.__weather_model_map.get(matching_model) if not isinstance(dictionary, dict): raise TypeError( "Please specify a dictionary or choose a valid model from the " @@ -1045,171 +1114,41 @@ def set_atmospheric_model( # pylint: disable=too-many-statements wind_u=0, wind_v=0, ): - """Defines an atmospheric model for the Environment. Supported - functionality includes using data from the `International Standard - Atmosphere`, importing data from weather reanalysis, forecasts and - ensemble forecasts, importing data from upper air soundings and - inputting data as custom functions, arrays or csv files. + """Define the atmospheric model for this Environment. Parameters ---------- type : string - One of the following options: - - - ``standard_atmosphere``: sets pressure and temperature profiles - corresponding to the International Standard Atmosphere defined by - ISO 2533 and ranging from -2 km to 80 km of altitude above sea - level. Note that the wind profiles are set to zero when this type - is chosen. - - - ``wyoming_sounding``: sets pressure, temperature, wind-u - and wind-v profiles and surface elevation obtained from - an upper air sounding given by the file parameter through - an URL. This URL should point to a data webpage given by - selecting plot type as text: list, a station and a time at - `weather.uwyo`_. 
- An example of a valid link would be: - - http://weather.uwyo.edu/cgi-bin/sounding?region=samer&TYPE=TEXT%3ALIST&YEAR=2019&MONTH=02&FROM=0200&TO=0200&STNM=82599 - - .. _weather.uwyo: http://weather.uwyo.edu/upperair/sounding.html - - - ``windy_atmosphere``: sets pressure, temperature, wind-u and - wind-v profiles and surface elevation obtained from the Windy API. - See file argument to specify the model as either ``ECMWF``, - ``GFS`` or ``ICON``. - - - ``Forecast``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date - and location of the launch should already have been set through - the date and location parameters when initializing the - Environment. The ``netCDF`` and ``OPeNDAP`` datasets must contain - at least geopotential height or geopotential, temperature, wind-u - and wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. - - - ``Reanalysis``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date and - location of the launch should already have been set through the - date and location parameters when initializing the Environment. 
- The ``netCDF`` and ``OPeNDAP`` datasets must contain at least - geopotential height or geopotential, temperature, wind-u and - wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. - - - ``Ensemble``: sets pressure, temperature, wind-u and wind-v - profiles and surface elevation obtained from a weather forecast - file in ``netCDF`` format or from an ``OPeNDAP`` URL, both given - through the file parameter. When this type is chosen, the date and - location of the launch should already have been set through the - date and location parameters when initializing the Environment. - The ``netCDF`` and ``OPeNDAP`` datasets must contain at least - geopotential height or geopotential, temperature, wind-u and - wind-v profiles as a function of pressure levels. If surface - geopotential or geopotential height is given, elevation is also - set. Otherwise, elevation is not changed. Profiles are - interpolated bi-linearly using supplied latitude and longitude. - The date used is the nearest one to the date supplied. - Furthermore, a dictionary must be supplied through the dictionary - parameter in order for the dataset to be accurately read. Lastly, - the dataset must use a rectangular grid sorted in either ascending - or descending order of latitude and longitude. By default the - first ensemble forecast is activated. - - .. seealso:: - - To activate other ensemble forecasts see - :meth:`rocketpy.Environment.select_ensemble_member`. 
- - - ``custom_atmosphere``: sets pressure, temperature, wind-u and - wind-v profiles given though the pressure, temperature, wind-u and - wind-v parameters of this method. If pressure or temperature is - not given, it will default to the `International Standard - Atmosphere`. If the wind components are not given, it will default - to 0. - - file : string, optional - String that must be given when type is either ``wyoming_sounding``, - ``Forecast``, ``Reanalysis``, ``Ensemble`` or ``Windy``. It - specifies the location of the data given, either through a local - file address or a URL. If type is ``Forecast``, this parameter can - also be either ``GFS``, ``FV3``, ``RAP`` or ``NAM`` for latest of - these forecasts. - - .. note:: - - Time reference for the Forecasts are: - - - ``GFS``: `Global` - 0.25deg resolution - Updates every 6 - hours, forecast for 81 points spaced by 3 hours - - ``RAP``: `Regional USA` - 0.19deg resolution - Updates hourly, - forecast for 40 points spaced hourly - - ``NAM``: `Regional CONUS Nest` - 5 km resolution - Updates - every 6 hours, forecast for 21 points spaced by 3 hours - - If type is ``Ensemble``, this parameter can also be ``GEFS`` - for the latest of this ensemble. - - .. note:: - - Time referece for the Ensembles are: - - - GEFS: Global, bias-corrected, 0.5deg resolution, 21 forecast - members, Updates every 6 hours, forecast for 65 points spaced - by 4 hours - - CMC (currently not available): Global, 0.5deg resolution, 21 \ - forecast members, Updates every 12 hours, forecast for 65 \ - points spaced by 4 hours - - If type is ``Windy``, this parameter can be either ``GFS``, - ``ECMWF``, ``ICON`` or ``ICONEU``. Default in this case is ``ECMWF``. - dictionary : dictionary, string, optional - Dictionary that must be given when type is either ``Forecast``, - ``Reanalysis`` or ``Ensemble``. It specifies the dictionary to be - used when reading ``netCDF`` and ``OPeNDAP`` files, allowing the - correct retrieval of data. 
Acceptable values include ``ECMWF``, - ``NOAA``, ``UCAR`` and ``MERRA2`` for default dictionaries which can generally - be used to read datasets from these institutes. Alternatively, a - dictionary structure can also be given, specifying the short names - used for time, latitude, longitude, pressure levels, temperature - profile, geopotential or geopotential height profile, wind-u and - wind-v profiles in the dataset given in the file parameter. - Additionally, ensemble dictionaries must have the ensemble as well. - An example is the following dictionary, used for ``NOAA``: - - .. code-block:: python - - dictionary = { - "time": "time", - "latitude": "lat", - "longitude": "lon", - "level": "lev", - "ensemble": "ens", - "temperature": "tmpprs", - "surface_geopotential_height": "hgtsfc", - "geopotential_height": "hgtprs", - "geopotential": None, - "u_wind": "ugrdprs", - "v_wind": "vgrdprs", - } + Atmospheric model selector (case-insensitive). Accepted values are + ``"standard_atmosphere"``, ``"wyoming_sounding"``, ``"windy"``, + ``"forecast"``, ``"reanalysis"``, ``"ensemble"`` and + ``"custom_atmosphere"``. + file : string | netCDF4.Dataset, optional + Data source or model shortcut. Meaning depends on ``type``: + + - ``"standard_atmosphere"`` and ``"custom_atmosphere"``: ignored. + - ``"wyoming_sounding"``: URL of the sounding text page. + - ``"windy"``: one of ``"ECMWF"``, ``"GFS"``, ``"ICON"`` or + ``"ICONEU"``. + - ``"forecast"``: local path, OPeNDAP URL, open + ``netCDF4.Dataset``, or one of ``"GFS"``, ``"NAM"`` or ``"RAP"`` + for the latest available forecast. + - ``"reanalysis"``: local path, OPeNDAP URL, or open + ``netCDF4.Dataset``. + - ``"ensemble"``: local path, OPeNDAP URL, open + ``netCDF4.Dataset``, or ``"GEFS"`` for the latest available + forecast. + dictionary : dict | str, optional + Variable-name mapping for ``"forecast"``, ``"reanalysis"`` and + ``"ensemble"``. 
It may be a custom dictionary or a built-in + mapping name (for example: ``"ECMWF"``, ``"ECMWF_v0"``, + ``"NOAA"``, ``"GFS"``, ``"NAM"``, ``"RAP"``, ``"HIRESW"``, + ``"GEFS"``, ``"MERRA2"`` or ``"CMC"``). + + If ``dictionary`` is omitted and ``file`` is one of RocketPy's + latest-model shortcuts, the matching built-in mapping is selected + automatically. For ensemble datasets, the mapping must include the + ensemble dimension key (typically ``"ensemble"``). pressure : float, string, array, callable, optional This defines the atmospheric pressure profile. @@ -1272,6 +1211,36 @@ def set_atmospheric_model( # pylint: disable=too-many-statements Returns ------- None + + Raises + ------ + ValueError + If ``type`` is unknown, if required launch date/time information is + missing for date-dependent models, if Windy model names are invalid, + or if required atmospheric variables cannot be read from the input + dataset. + TypeError + If ``dictionary`` is invalid for ``"forecast"``, ``"reanalysis"`` + or ``"ensemble"``. + KeyError + If a built-in mapping name passed in ``dictionary`` is unknown. + + See Also + -------- + :ref:`atmospheric_models` + Overview of all atmospheric-model workflows in the user guide. + :ref:`forecast` + Forecast and Windy usage details, including latest-model shortcuts. + :ref:`reanalysis` + Reanalysis and MERRA-2 examples. + :ref:`soundings` + Wyoming sounding workflow and RUC migration notes. + :ref:`custom_atmosphere` + Defining pressure, temperature and wind profiles directly. + :ref:`ensemble_atmosphere` + Ensemble forecasts and member-selection workflow. + :ref:`environment_other_apis` + Building custom mapping dictionaries for NetCDF/OPeNDAP APIs. 
""" # Save atmospheric model type self.atmospheric_model_type = type @@ -1287,6 +1256,36 @@ def set_atmospheric_model( # pylint: disable=too-many-statements case "windy": self.process_windy_atmosphere(file) case "forecast" | "reanalysis" | "ensemble": + if isinstance(file, str): + shortcut_map = self.__atm_type_file_to_function_map.get(type, {}) + matching_shortcut = next( + ( + shortcut + for shortcut in shortcut_map + if shortcut.lower() == file.lower() + ), + None, + ) + if matching_shortcut is not None: + file = matching_shortcut + + if isinstance(file, str): + file_upper = file.upper() + if type == "forecast" and file_upper == "HIRESW": + raise ValueError( + "The HIRESW latest-model shortcut is currently " + "unavailable because NOMADS OPeNDAP is deactivated. " + "Please use another forecast source or provide a " + "compatible dataset path/URL explicitly." + ) + if type == "ensemble" and file_upper == "GEFS": + raise ValueError( + "The GEFS latest-model shortcut is currently " + "unavailable because NOMADS OPeNDAP is deactivated. " + "Please use another ensemble source or provide a " + "compatible dataset path/URL explicitly." + ) + dictionary = self.__validate_dictionary(file, dictionary) try: fetch_function = self.__atm_type_file_to_function_map[type][file] @@ -1471,6 +1470,12 @@ def process_windy_atmosphere(self, model="ECMWF"): # pylint: disable=too-many-s ``ECMWF`` for the `ECMWF-HRES` model, ``GFS`` for the `GFS` model, ``ICON`` for the `ICON-Global` model or ``ICONEU`` for the `ICON-EU` model. + + Raises + ------ + ValueError + If ``model`` is not one of ``ECMWF``, ``GFS``, ``ICON`` or + ``ICONEU``. 
""" if model.lower() not in ["ecmwf", "gfs", "icon", "iconeu"]: @@ -1728,6 +1733,13 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- Returns ------- None + + Raises + ------ + ValueError + If launch date/time was not set before loading date-dependent data, + or if required geopotential/geopotential-height, temperature, + wind-u, or wind-v variables cannot be read from the dataset. """ # Check if date, lat and lon are known self.__validate_datetime() @@ -1735,20 +1747,34 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- # Read weather file if isinstance(file, str): data = netCDF4.Dataset(file) - if dictionary["time"] not in data.variables.keys(): - dictionary = self.__weather_model_map.get("ECMWF_v0") else: data = file + dictionary = self.__resolve_dictionary_for_dataset(dictionary, data) + # Get time, latitude and longitude data from file time_array = data.variables[dictionary["time"]] - lon_list = data.variables[dictionary["longitude"]][:].tolist() - lat_list = data.variables[dictionary["latitude"]][:].tolist() + lon_array = data.variables[dictionary["longitude"]] + lat_array = data.variables[dictionary["latitude"]] + + # Some THREDDS datasets use projected x/y coordinates. 
+ if dictionary.get("projection") is not None: + projection_variable = data.variables[dictionary["projection"]] + x_units = getattr(lon_array, "units", "m") + target_lon, target_lat = geodesic_to_lambert_conformal( + self.latitude, + self.longitude, + projection_variable, + x_units=x_units, + ) + else: + target_lon = self.longitude + target_lat = self.latitude # Find time, latitude and longitude indexes time_index = find_time_index(self.datetime_date, time_array) - lon, lon_index = find_longitude_index(self.longitude, lon_list) - _, lat_index = find_latitude_index(self.latitude, lat_list) + lon, lon_index = find_longitude_index(target_lon, lon_array) + _, lat_index = find_latitude_index(target_lat, lat_array) # Get pressure level data from file levels = get_pressure_levels_from_file(data, dictionary) @@ -1806,9 +1832,9 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- ) from e # Prepare for bilinear interpolation - x, y = self.latitude, lon - x1, y1 = lat_list[lat_index - 1], lon_list[lon_index - 1] - x2, y2 = lat_list[lat_index], lon_list[lon_index] + x, y = target_lat, lon + x1, y1 = float(lat_array[lat_index - 1]), float(lon_array[lon_index - 1]) + x2, y2 = float(lat_array[lat_index]), float(lon_array[lon_index]) # Determine properties in lat, lon height = bilinear_interpolation( @@ -1860,6 +1886,17 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- wind_vs[:, 1, 1], ) + # Some datasets expose different level counts between fields + # (e.g., temperature on isobaric1 and geopotential on isobaric). 
+ min_profile_length = min( + len(levels), len(height), len(temper), len(wind_u), len(wind_v) + ) + levels = levels[:min_profile_length] + height = height[:min_profile_length] + temper = temper[:min_profile_length] + wind_u = wind_u[:min_profile_length] + wind_v = wind_v[:min_profile_length] + # Determine wind speed, heading and direction wind_speed = calculate_wind_speed(wind_u, wind_v) wind_heading = calculate_wind_heading(wind_u, wind_v) @@ -1917,14 +1954,14 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- ) else: self.atmospheric_model_interval = 0 - self.atmospheric_model_init_lat = lat_list[0] - self.atmospheric_model_end_lat = lat_list[-1] - self.atmospheric_model_init_lon = lon_list[0] - self.atmospheric_model_end_lon = lon_list[-1] + self.atmospheric_model_init_lat = float(lat_array[0]) + self.atmospheric_model_end_lat = float(lat_array[len(lat_array) - 1]) + self.atmospheric_model_init_lon = float(lon_array[0]) + self.atmospheric_model_end_lon = float(lon_array[len(lon_array) - 1]) # Save debugging data - self.lat_array = lat_list - self.lon_array = lon_list + self.lat_array = [x1, x2] + self.lon_array = [y1, y2] self.lon_index = lon_index self.lat_index = lat_index self.geopotentials = geopotentials @@ -1932,7 +1969,10 @@ def process_forecast_reanalysis(self, file, dictionary): # pylint: disable=too- self.wind_vs = wind_vs self.levels = levels self.temperatures = temperatures - self.time_array = time_array[:].tolist() + self.time_array = [ + float(time_array[0]), + float(time_array[time_array.shape[0] - 1]), + ] self.height = height # Close weather data @@ -1994,6 +2034,13 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals -------- See the :class:``rocketpy.environment.weather_model_mapping`` for some dictionary examples. 
+ + Raises + ------ + ValueError + If launch date/time was not set before loading date-dependent data, + or if required geopotential/geopotential-height, temperature, + wind-u, or wind-v variables cannot be read from the dataset. """ # Check if date, lat and lon are known self.__validate_datetime() @@ -2004,23 +2051,40 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals else: data = file + dictionary = self.__resolve_dictionary_for_dataset(dictionary, data) + # Get time, latitude and longitude data from file time_array = data.variables[dictionary["time"]] - lon_list = data.variables[dictionary["longitude"]][:].tolist() - lat_list = data.variables[dictionary["latitude"]][:].tolist() + lon_array = data.variables[dictionary["longitude"]] + lat_array = data.variables[dictionary["latitude"]] + + # Some THREDDS datasets use projected x/y coordinates. + # TODO CHECK THIS I AM NOT SURE????? + if dictionary.get("projection") is not None: + projection_variable = data.variables[dictionary["projection"]] + x_units = getattr(lon_array, "units", "m") + target_lon, target_lat = geodesic_to_lambert_conformal( + self.latitude, + self.longitude, + projection_variable, + x_units=x_units, + ) + else: + target_lon = self.longitude + target_lat = self.latitude # Find time, latitude and longitude indexes time_index = find_time_index(self.datetime_date, time_array) - lon, lon_index = find_longitude_index(self.longitude, lon_list) - _, lat_index = find_latitude_index(self.latitude, lat_list) + lon, lon_index = find_longitude_index(target_lon, lon_array) + _, lat_index = find_latitude_index(target_lat, lat_array) # Get ensemble data from file + has_ensemble_dimension = True try: num_members = len(data.variables[dictionary["ensemble"]][:]) - except KeyError as e: - raise ValueError( - "Unable to read ensemble data from file. Check file and dictionary." 
- ) from e + except KeyError: + has_ensemble_dimension = False + num_members = 1 # Get pressure level data from file levels = get_pressure_levels_from_file(data, dictionary) @@ -2079,10 +2143,16 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals "Unable to read wind-v component. Check file and dictionary." ) from e + if not has_ensemble_dimension: + geopotentials = np.expand_dims(geopotentials, axis=0) + temperatures = np.expand_dims(temperatures, axis=0) + wind_us = np.expand_dims(wind_us, axis=0) + wind_vs = np.expand_dims(wind_vs, axis=0) + # Prepare for bilinear interpolation - x, y = self.latitude, lon - x1, y1 = lat_list[lat_index - 1], lon_list[lon_index - 1] - x2, y2 = lat_list[lat_index], lon_list[lon_index] + x, y = target_lat, lon + x1, y1 = float(lat_array[lat_index - 1]), float(lon_array[lon_index - 1]) + x2, y2 = float(lat_array[lat_index]), float(lon_array[lon_index]) # Determine properties in lat, lon height = bilinear_interpolation( @@ -2134,6 +2204,19 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals wind_vs[:, :, 1, 1], ) + min_profile_length = min( + len(levels), + height.shape[1], + temper.shape[1], + wind_u.shape[1], + wind_v.shape[1], + ) + levels = levels[:min_profile_length] + height = height[:, :min_profile_length] + temper = temper[:, :min_profile_length] + wind_u = wind_u[:, :min_profile_length] + wind_v = wind_v[:, :min_profile_length] + # Determine wind speed, heading and direction wind_speed = calculate_wind_speed(wind_u, wind_v) wind_heading = calculate_wind_heading(wind_u, wind_v) @@ -2166,14 +2249,14 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals self.atmospheric_model_init_date = get_initial_date_from_time_array(time_array) self.atmospheric_model_end_date = get_final_date_from_time_array(time_array) self.atmospheric_model_interval = get_interval_date_from_time_array(time_array) - self.atmospheric_model_init_lat = lat_list[0] - 
self.atmospheric_model_end_lat = lat_list[-1] - self.atmospheric_model_init_lon = lon_list[0] - self.atmospheric_model_end_lon = lon_list[-1] + self.atmospheric_model_init_lat = float(lat_array[0]) + self.atmospheric_model_end_lat = float(lat_array[len(lat_array) - 1]) + self.atmospheric_model_init_lon = float(lon_array[0]) + self.atmospheric_model_end_lon = float(lon_array[len(lon_array) - 1]) # Save debugging data - self.lat_array = lat_list - self.lon_array = lon_list + self.lat_array = [x1, x2] + self.lon_array = [y1, y2] self.lon_index = lon_index self.lat_index = lat_index self.geopotentials = geopotentials @@ -2181,7 +2264,10 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals self.wind_vs = wind_vs self.levels = levels self.temperatures = temperatures - self.time_array = time_array[:].tolist() + self.time_array = [ + float(time_array[0]), + float(time_array[time_array.shape[0] - 1]), + ] self.height = height # Close weather data diff --git a/rocketpy/environment/fetchers.py b/rocketpy/environment/fetchers.py index d5ac2a1df..589159f1c 100644 --- a/rocketpy/environment/fetchers.py +++ b/rocketpy/environment/fetchers.py @@ -113,33 +113,18 @@ def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for GFS. """ - time_attempt = datetime.now(tz=timezone.utc) + file_url = ( + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg/Best" + ) attempt_count = 0 - dataset = None - - # TODO: the code below is trying to determine the hour of the latest available - # forecast by trial and error. This is not the best way to do it. We should - # actually check the NOAA website for the latest forecast time. Refactor needed. 
while attempt_count < max_attempts: - time_attempt -= timedelta(hours=6) # GFS updates every 6 hours - file_url = ( - f"https://nomads.ncep.noaa.gov/dods/gfs_0p25/gfs" - f"{time_attempt.year:04d}{time_attempt.month:02d}" - f"{time_attempt.day:02d}/" - f"gfs_0p25_{6 * (time_attempt.hour // 6):02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. - dataset = netCDF4.Dataset(file_url) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError( - "Unable to load latest weather data for GFS through " + file_url - ) + raise RuntimeError("Unable to load latest weather data for GFS through " + file_url) def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): @@ -163,28 +148,16 @@ def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for NAM. """ - # Attempt to get latest forecast - time_attempt = datetime.now(tz=timezone.utc) + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/NAM/CONUS_12km/Best" attempt_count = 0 - dataset = None - while attempt_count < max_attempts: - time_attempt -= timedelta(hours=6) # NAM updates every 6 hours - file = ( - f"https://nomads.ncep.noaa.gov/dods/nam/nam{time_attempt.year:04d}" - f"{time_attempt.month:02d}{time_attempt.day:02d}/" - f"nam_conusnest_{6 * (time_attempt.hour // 6):02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. 
- dataset = netCDF4.Dataset(file) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError("Unable to load latest weather data for NAM through " + file) + raise RuntimeError("Unable to load latest weather data for NAM through " + file_url) def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): @@ -208,28 +181,88 @@ def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): RuntimeError If unable to load the latest weather data for RAP. """ - # Attempt to get latest forecast - time_attempt = datetime.now(tz=timezone.utc) + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/RAP/CONUS_13km/Best" attempt_count = 0 - dataset = None + while attempt_count < max_attempts: + try: + return netCDF4.Dataset(file_url) + except OSError: + attempt_count += 1 + time.sleep(base_delay**attempt_count) + + raise RuntimeError("Unable to load latest weather data for RAP through " + file_url) + +def fetch_hrrr_file_return_dataset(max_attempts=10, base_delay=2): + """Fetches the latest HRRR (High-Resolution Rapid Refresh) dataset from + the NOAA's GrADS data server using the OpenDAP protocol. + + Parameters + ---------- + max_attempts : int, optional + The maximum number of attempts to fetch the dataset. Default is 10. + base_delay : int, optional + The base delay in seconds between attempts. Default is 2. + + Returns + ------- + netCDF4.Dataset + The HRRR dataset. + + Raises + ------ + RuntimeError + If unable to load the latest weather data for HRRR. 
+ """ + file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/HRRR/CONUS_2p5km/Best" + attempt_count = 0 while attempt_count < max_attempts: - time_attempt -= timedelta(hours=1) # RAP updates every hour - file = ( - f"https://nomads.ncep.noaa.gov/dods/rap/rap{time_attempt.year:04d}" - f"{time_attempt.month:02d}{time_attempt.day:02d}/" - f"rap_{time_attempt.hour:02d}z" - ) try: - # Attempts to create a dataset from the file using OpenDAP protocol. - dataset = netCDF4.Dataset(file) - return dataset + return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 time.sleep(base_delay**attempt_count) - if dataset is None: - raise RuntimeError("Unable to load latest weather data for RAP through " + file) + raise RuntimeError( + "Unable to load latest weather data for HRRR through " + file_url + ) + + +def fetch_aigfs_file_return_dataset(max_attempts=10, base_delay=2): + """Fetches the latest AIGFS (Artificial Intelligence GFS) dataset from + the NOAA's GrADS data server using the OpenDAP protocol. + + Parameters + ---------- + max_attempts : int, optional + The maximum number of attempts to fetch the dataset. Default is 10. + base_delay : int, optional + The base delay in seconds between attempts. Default is 2. + + Returns + ------- + netCDF4.Dataset + The AIGFS dataset. + + Raises + ------ + RuntimeError + If unable to load the latest weather data for AIGFS. 
+ """ + file_url = ( + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/AIGFS/Global_0p25deg/Best" + ) + attempt_count = 0 + while attempt_count < max_attempts: + try: + return netCDF4.Dataset(file_url) + except OSError: + attempt_count += 1 + time.sleep(base_delay**attempt_count) + + raise RuntimeError( + "Unable to load latest weather data for AIGFS through " + file_url + ) def fetch_hiresw_file_return_dataset(max_attempts=10, base_delay=2): diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 1239ee6b9..fb0179c9e 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -5,7 +5,7 @@ future to improve their performance and usability. """ -import bisect +import math import warnings import netCDF4 @@ -109,6 +109,63 @@ def calculate_wind_speed(u, v, w=0.0): return np.sqrt(u**2 + v**2 + w**2) +def geodesic_to_lambert_conformal(lat, lon, projection_variable, x_units="m"): + """Convert geodesic coordinates to Lambert conformal projected coordinates. + + Parameters + ---------- + lat : float + Latitude in degrees. + lon : float + Longitude in degrees, ranging from -180 to 180. + projection_variable : netCDF4.Variable + Projection variable containing Lambert conformal metadata. + x_units : str, optional + Units used by the dataset x coordinate. Supported values are meters + and kilometers. Default is "m". + + Returns + ------- + tuple[float, float] + Projected coordinates ``(x, y)`` in the same units as ``x_units``. 
+ """ + lat_radians = math.radians(lat) + lon_radians = math.radians(lon % 360) + + lat_origin = math.radians(float(projection_variable.latitude_of_projection_origin)) + lon_origin = math.radians(float(projection_variable.longitude_of_central_meridian)) + + standard_parallel = projection_variable.standard_parallel + if np.ndim(standard_parallel) == 0: + standard_parallels = [float(standard_parallel)] + else: + standard_parallels = np.asarray(standard_parallel, dtype=float).tolist() + + if len(standard_parallels) >= 2: + phi_1 = math.radians(standard_parallels[0]) + phi_2 = math.radians(standard_parallels[1]) + n = math.log(math.cos(phi_1) / math.cos(phi_2)) / math.log( + math.tan(math.pi / 4 + phi_2 / 2) / math.tan(math.pi / 4 + phi_1 / 2) + ) + else: + phi_1 = math.radians(standard_parallels[0]) + n = math.sin(phi_1) + + earth_radius = float(getattr(projection_variable, "earth_radius", 6371229.0)) + f_const = (math.cos(phi_1) * math.tan(math.pi / 4 + phi_1 / 2) ** n) / n + + rho = earth_radius * f_const / (math.tan(math.pi / 4 + lat_radians / 2) ** n) + rho_origin = earth_radius * f_const / (math.tan(math.pi / 4 + lat_origin / 2) ** n) + theta = n * (lon_radians - lon_origin) + + x_meters = rho * math.sin(theta) + y_meters = rho_origin - rho * math.cos(theta) + + if str(x_units).lower().startswith("km"): + return x_meters / 1000.0, y_meters / 1000.0 + return x_meters, y_meters + + ## These functions are meant to be used with netcdf4 datasets @@ -168,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): +def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements """Finds the index of the given longitude in a list of longitudes. Parameters @@ -188,30 +245,48 @@ def find_longitude_index(longitude, lon_list): ValueError If the longitude is not within the range covered by the list. 
""" - # Determine if file uses -180 to 180 or 0 to 360 - if lon_list[0] < 0 or lon_list[-1] < 0: - # Convert input to -180 - 180 - lon = longitude if longitude < 180 else -180 + longitude % 180 - else: - # Convert input to 0 - 360 - lon = longitude % 360 - # Check if reversed or sorted - if lon_list[0] < lon_list[-1]: - # Deal with sorted lon_list - lon_index = bisect.bisect(lon_list, lon) + + def _coord_value(source, index): + return float(source[index]) + + lon_len = len(lon_list) + lon_start = _coord_value(lon_list, 0) + lon_end = _coord_value(lon_list, lon_len - 1) + + # Determine if file uses geographic longitudes in [-180, 180] or [0, 360]. + # Do not remap projected x coordinates. + is_geographic_longitude = abs(lon_start) <= 360 and abs(lon_end) <= 360 + if is_geographic_longitude: + if lon_start < 0 or lon_end < 0: + lon = longitude if longitude < 180 else -180 + longitude % 180 + else: + lon = longitude % 360 else: - # Deal with reversed lon_list - lon_list.reverse() - lon_index = len(lon_list) - bisect.bisect_left(lon_list, lon) - lon_list.reverse() + lon = longitude + + is_ascending = lon_start < lon_end + + # Binary search to find the insertion index such that index-1 and index + # bracket the requested longitude. + low = 0 + high = lon_len + while low < high: + mid = (low + high) // 2 + mid_value = _coord_value(lon_list, mid) + if (mid_value < lon) if is_ascending else (mid_value > lon): + low = mid + 1 + else: + high = mid + lon_index = low + # Take care of longitude value equal to maximum longitude in the grid - if lon_index == len(lon_list) and lon_list[lon_index - 1] == lon: - lon_index = lon_index - 1 + if lon_index == lon_len and _coord_value(lon_list, lon_index - 1) == lon: + lon_index -= 1 # Check if longitude value is inside the grid - if lon_index == 0 or lon_index == len(lon_list): + if lon_index in (0, lon_len): raise ValueError( f"Longitude {lon} not inside region covered by file, which is " - f"from {lon_list[0]} to {lon_list[-1]}." 
+ f"from {lon_start} to {lon_end}." ) return lon, lon_index @@ -237,28 +312,39 @@ def find_latitude_index(latitude, lat_list): ValueError If the latitude is not within the range covered by the list. """ - # Check if reversed or sorted - if lat_list[0] < lat_list[-1]: - # Deal with sorted lat_list - lat_index = bisect.bisect(lat_list, latitude) - else: - # Deal with reversed lat_list - lat_list.reverse() - lat_index = len(lat_list) - bisect.bisect_left(lat_list, latitude) - lat_list.reverse() - # Take care of latitude value equal to maximum longitude in the grid - if lat_index == len(lat_list) and lat_list[lat_index - 1] == latitude: - lat_index = lat_index - 1 + + def _coord_value(source, index): + return float(source[index]) + + lat_len = len(lat_list) + lat_start = _coord_value(lat_list, 0) + lat_end = _coord_value(lat_list, lat_len - 1) + is_ascending = lat_start < lat_end + + low = 0 + high = lat_len + while low < high: + mid = (low + high) // 2 + mid_value = _coord_value(lat_list, mid) + if (mid_value < latitude) if is_ascending else (mid_value > latitude): + low = mid + 1 + else: + high = mid + lat_index = low + + # Take care of latitude value equal to maximum latitude in the grid + if lat_index == lat_len and _coord_value(lat_list, lat_index - 1) == latitude: + lat_index -= 1 # Check if latitude value is inside the grid - if lat_index == 0 or lat_index == len(lat_list): + if lat_index in (0, lat_len): raise ValueError( f"Latitude {latitude} not inside region covered by file, " - f"which is from {lat_list[0]} to {lat_list[-1]}." + f"which is from {lat_start} to {lat_end}." ) return latitude, lat_index -def find_time_index(datetime_date, time_array): +def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements """Finds the index of the given datetime in a netCDF4 time array. 
Parameters @@ -280,26 +366,58 @@ def find_time_index(datetime_date, time_array): ValueError If the exact datetime is not available and the nearest datetime is used instead. """ - time_index = netCDF4.date2index( - datetime_date, time_array, calendar="gregorian", select="nearest" - ) - # Convert times do dates and numbers - input_time_num = netCDF4.date2num( - datetime_date, time_array.units, calendar="gregorian" - ) - file_time_num = time_array[time_index] - file_time_date = netCDF4.num2date( - time_array[time_index], time_array.units, calendar="gregorian" - ) + time_len = len(time_array) + time_units = time_array.units + input_time_num = netCDF4.date2num(datetime_date, time_units, calendar="gregorian") + + first_time_num = float(time_array[0]) + last_time_num = float(time_array[time_len - 1]) + is_ascending = first_time_num <= last_time_num + + # Binary search nearest index using scalar probing only. + low = 0 + high = time_len + while low < high: + mid = (low + high) // 2 + mid_time_num = float(time_array[mid]) + if ( + (mid_time_num < input_time_num) + if is_ascending + else (mid_time_num > input_time_num) + ): + low = mid + 1 + else: + high = mid + + right_index = min(max(low, 0), time_len - 1) + left_index = min(max(right_index - 1, 0), time_len - 1) + + right_time_num = float(time_array[right_index]) + left_time_num = float(time_array[left_index]) + if abs(input_time_num - left_time_num) <= abs(right_time_num - input_time_num): + time_index = left_index + file_time_num = left_time_num + else: + time_index = right_index + file_time_num = right_time_num + + file_time_date = netCDF4.num2date(file_time_num, time_units, calendar="gregorian") + # Check if time is inside range supplied by file - if time_index == 0 and input_time_num < file_time_num: + if time_index == 0 and ( + (is_ascending and input_time_num < file_time_num) + or (not is_ascending and input_time_num > file_time_num) + ): raise ValueError( f"The chosen launch time 
'{datetime_date.strftime('%Y-%m-%d-%H:')} UTC' is" " not available in the provided file. Please choose a time within the range" " of the file, which starts at " f"'{file_time_date.strftime('%Y-%m-%d-%H')} UTC'." ) - elif time_index == len(time_array) - 1 and input_time_num > file_time_num: + elif time_index == time_len - 1 and ( + (is_ascending and input_time_num > file_time_num) + or (not is_ascending and input_time_num < file_time_num) + ): raise ValueError( "Chosen launch time is not available in the provided file, " f"which ends at {file_time_date}." diff --git a/rocketpy/environment/weather_model_mapping.py b/rocketpy/environment/weather_model_mapping.py index 75089f577..c490fad9d 100644 --- a/rocketpy/environment/weather_model_mapping.py +++ b/rocketpy/environment/weather_model_mapping.py @@ -1,9 +1,42 @@ class WeatherModelMapping: - """Class to map the weather model variables to the variables used in the - Environment class. + """Map provider-specific variable names to RocketPy weather fields. + + RocketPy reads forecast/reanalysis/ensemble datasets using canonical keys + such as ``time``, ``latitude``, ``longitude``, ``level``, ``temperature``, + ``geopotential_height``, ``geopotential``, ``u_wind`` and ``v_wind``. + Each dictionary in this class maps those canonical keys to the actual + variable names in a specific data provider format. + + Mapping families + ---------------- + - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current + default mappings used by the latest-model shortcuts and THREDDS-style + datasets. + - ``*_LEGACY`` names represent older NOMADS-style variable naming + conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and + ``vgrdprs``) and are intended for archived or previously downloaded files. + + Notes + ----- + - Mappings can also include optional keys such as ``projection`` for + projected grids and ``ensemble`` for member dimensions. 
+ - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and + ``"GFS_LEGACY"`` are equivalent. """ GFS = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": "Geopotential_height_surface", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + GFS_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -16,6 +49,19 @@ class WeatherModelMapping: "v_wind": "vgrdprs", } NAM = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": None, + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + NAM_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -54,6 +100,18 @@ class WeatherModelMapping: "v_wind": "v", } NOAA = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": "Geopotential_height_surface", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + NOAA_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -66,6 +124,19 @@ class WeatherModelMapping: "v_wind": "vgrdprs", } RAP = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "surface_geopotential_height": None, + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", 
+ "v_wind": "v-component_of_wind_isobaric", + } + RAP_LEGACY = { "time": "time", "latitude": "lat", "longitude": "lon", @@ -90,6 +161,19 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + CMC_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "ensemble": "ens", + "temperature": "tmpprs", + "surface_geopotential_height": None, + "geopotential_height": "hgtprs", + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } GEFS = { "time": "time", "latitude": "lat", @@ -103,6 +187,19 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + GEFS_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "ensemble": "ens", + "temperature": "tmpprs", + "surface_geopotential_height": None, + "geopotential_height": "hgtprs", + "geopotential": None, + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } HIRESW = { "time": "time", "latitude": "lat", @@ -114,6 +211,17 @@ class WeatherModelMapping: "u_wind": "ugrdprs", "v_wind": "vgrdprs", } + HIRESW_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "tmpprs", + "surface_geopotential_height": "hgtsfc", + "geopotential_height": "hgtprs", + "u_wind": "ugrdprs", + "v_wind": "vgrdprs", + } MERRA2 = { "time": "time", "latitude": "lat", @@ -127,29 +235,78 @@ class WeatherModelMapping: "u_wind": "U", "v_wind": "V", } + MERRA2_LEGACY = { + "time": "time", + "latitude": "lat", + "longitude": "lon", + "level": "lev", + "temperature": "T", + "surface_geopotential_height": None, + "surface_geopotential": "PHIS", + "geopotential_height": "H", + "geopotential": None, + "u_wind": "U", + "v_wind": "V", + } def __init__(self): - """Initialize the class, creates a dictionary with all the weather models - available and their respective dictionaries with the variables.""" + """Build the lookup table with default and legacy mapping aliases.""" self.all_dictionaries = { 
"GFS": self.GFS, + "GFS_LEGACY": self.GFS_LEGACY, "NAM": self.NAM, + "NAM_LEGACY": self.NAM_LEGACY, "ECMWF_v0": self.ECMWF_v0, "ECMWF": self.ECMWF, "NOAA": self.NOAA, + "NOAA_LEGACY": self.NOAA_LEGACY, "RAP": self.RAP, + "RAP_LEGACY": self.RAP_LEGACY, "CMC": self.CMC, + "CMC_LEGACY": self.CMC_LEGACY, "GEFS": self.GEFS, + "GEFS_LEGACY": self.GEFS_LEGACY, "HIRESW": self.HIRESW, + "HIRESW_LEGACY": self.HIRESW_LEGACY, "MERRA2": self.MERRA2, + "MERRA2_LEGACY": self.MERRA2_LEGACY, } def get(self, model): + """Return a mapping dictionary by model alias (case-insensitive). + + Parameters + ---------- + model : str + Mapping alias name, such as ``"GFS"`` or ``"GFS_LEGACY"``. + + Returns + ------- + dict + Dictionary mapping RocketPy canonical weather keys to dataset + variable names. + + Raises + ------ + KeyError + If ``model`` is unknown or not a string. + """ + if not isinstance(model, str): + raise KeyError( + f"Model {model} not found in the WeatherModelMapping. " + f"The available models are: {self.all_dictionaries.keys()}" + ) + try: return self.all_dictionaries[model] - except KeyError as e: + except KeyError as exc: + model_casefold = model.casefold() + for key, value in self.all_dictionaries.items(): + if key.casefold() == model_casefold: + return value + raise KeyError( f"Model {model} not found in the WeatherModelMapping. " f"The available models are: {self.all_dictionaries.keys()}" - ) from e + ) from exc diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index 3bdd5209a..37078b8fd 100644 --- a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -178,8 +178,11 @@ def test_gefs_atmosphere(mock_show, example_spaceport_env): # pylint: disable=u example_spaceport_env : rocketpy.Environment Example environment object to be tested. 
""" - example_spaceport_env.set_atmospheric_model(type="Ensemble", file="GEFS") - assert example_spaceport_env.all_info() is None + with pytest.raises( + ValueError, + match="GEFS latest-model shortcut is currently unavailable", + ): + example_spaceport_env.set_atmospheric_model(type="Ensemble", file="GEFS") @pytest.mark.slow @@ -234,13 +237,15 @@ def test_hiresw_ensemble_atmosphere(mock_show, example_spaceport_env): # pylint example_spaceport_env.set_date(date_info) - example_spaceport_env.set_atmospheric_model( - type="Forecast", - file="HIRESW", - dictionary="HIRESW", - ) - - assert example_spaceport_env.all_info() is None + with pytest.raises( + ValueError, + match="HIRESW latest-model shortcut is currently unavailable", + ): + example_spaceport_env.set_atmospheric_model( + type="Forecast", + file="HIRESW", + dictionary="HIRESW", + ) @pytest.mark.skip(reason="CMC model is currently not working") diff --git a/tests/unit/environment/test_environment.py b/tests/unit/environment/test_environment.py index 6ad3e51db..6d04c089f 100644 --- a/tests/unit/environment/test_environment.py +++ b/tests/unit/environment/test_environment.py @@ -7,6 +7,7 @@ from rocketpy import Environment from rocketpy.environment.tools import geodesic_to_utm, utm_to_geodesic +from rocketpy.environment.weather_model_mapping import WeatherModelMapping @pytest.mark.parametrize( @@ -243,3 +244,70 @@ def test_environment_export_environment_exports_valid_environment_json( ) os.remove("environment.json") + + +class _DummyDataset: + """Small test double that mimics a netCDF dataset variables mapping.""" + + def __init__(self, variable_names): + self.variables = {name: object() for name in variable_names} + + +def test_resolve_dictionary_keeps_compatible_mapping(example_plain_env): + """Keep the user-selected mapping when it already matches dataset keys.""" + gfs_mapping = example_plain_env._Environment__weather_model_map.get("GFS") + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + 
"isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + gfs_mapping, dataset + ) + + assert resolved is gfs_mapping + + +def test_resolve_dictionary_falls_back_to_legacy_mapping(example_plain_env): + """Fallback to a compatible built-in mapping for legacy NOMADS-style files.""" + thredds_gfs_mapping = example_plain_env._Environment__weather_model_map.get("GFS") + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + "lev", + "tmpprs", + "hgtprs", + "ugrdprs", + "vgrdprs", + ] + ) + + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + thredds_gfs_mapping, dataset + ) + + # Explicit legacy mappings should be preferred over unrelated model mappings. + assert resolved == example_plain_env._Environment__weather_model_map.get( + "GFS_LEGACY" + ) + assert resolved["level"] == "lev" + assert resolved["temperature"] == "tmpprs" + assert resolved["geopotential_height"] == "hgtprs" + + +def test_weather_model_mapping_exposes_legacy_aliases(): + """Legacy mapping names should be available and case-insensitive.""" + mapping = WeatherModelMapping() + + assert mapping.get("GFS_LEGACY")["temperature"] == "tmpprs" + assert mapping.get("gfs_legacy")["temperature"] == "tmpprs" From 3bca26ce791b651125bac3d74b345b3a42f3b07e Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Sat, 14 Mar 2026 13:15:24 -0300 Subject: [PATCH 25/44] BUG: Fix hard-coded radius value for parachute added mass calculation (#889) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix hard-coded radius value for parachute added mass calculation Calculate radius from cd_s using a typical hemispherical parachute drag coefficient (1.4) when radius is not explicitly provided. 
This fixes drift distance calculations for smaller parachutes like drogues. Formula: R = sqrt(cd_s / (Cd * π)) Closes #860 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Address code review: improve docstrings and add explicit None defaults Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Add CHANGELOG entry for PR #889 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Update rocket.add_parachute to use radius=None for consistency Changed the default radius from 1.5 to None in the add_parachute method to match the Parachute class behavior. This ensures consistent automatic radius calculation from cd_s across both APIs. Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Refactor Parachute class to remove hard-coded radius value and introduce drag_coefficient parameter for radius estimation Fix hard-coded radius value for parachute added mass calculation Calculate radius from cd_s using a typical hemispherical parachute drag coefficient (1.4) when radius is not explicitly provided. This fixes drift distance calculations for smaller parachutes like drogues. 
Formula: R = sqrt(cd_s / (Cd * π)) Closes #860 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Add CHANGELOG entry for PR #889 Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Refactor Parachute class to remove hard-coded radius value and introduce drag_coefficient parameter for radius estimation MNT: Extract noise initialization to fix pylint too-many-statements in Parachute.__init__ Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> * Refactor environment method access in controller test for clarity * fix pylint * fix comments * avoid breaking change with drag_coefficient * refactors Parachute.__init__ method * fix tests --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Co-authored-by: Gui-FernandesBR --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8fe5bd03f..88f8d4943 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,7 +73,7 @@ Attention: The newest changes should be on top --> - BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) - BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) -- BUG: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) +- BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - 
BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] From 7be7721cd67a1998f4464fc343b90b667fc876d8 Mon Sep 17 00:00:00 2001 From: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> Date: Thu, 19 Mar 2026 21:56:49 -0300 Subject: [PATCH 26/44] ENH: Add guidelines for simulation safety, Sphinx documentation, and pytest standards (GitHub Copilot) (#937) --- .github/agents/rocketpy-reviewer.agent.md | 62 ++++ .github/copilot-instructions.md | 301 +++++------------- .../simulation-safety.instructions.md | 41 +++ .../instructions/sphinx-docs.instructions.md | 32 ++ .../instructions/tests-python.instructions.md | 36 +++ 5 files changed, 251 insertions(+), 221 deletions(-) create mode 100644 .github/agents/rocketpy-reviewer.agent.md create mode 100644 .github/instructions/simulation-safety.instructions.md create mode 100644 .github/instructions/sphinx-docs.instructions.md create mode 100644 .github/instructions/tests-python.instructions.md diff --git a/.github/agents/rocketpy-reviewer.agent.md b/.github/agents/rocketpy-reviewer.agent.md new file mode 100644 index 000000000..be1b64b13 --- /dev/null +++ b/.github/agents/rocketpy-reviewer.agent.md @@ -0,0 +1,62 @@ +--- +description: "Physics-safe RocketPy code review agent. Use for pull request review, unit consistency checks, coordinate-frame validation, cached-property risk detection, and regression-focused test-gap analysis." +name: "RocketPy Reviewer" +tools: [read, search, execute] +argument-hint: "Review these changes for physics correctness and regression risk: " +user-invocable: true +--- +You are a RocketPy-focused reviewer for physics safety and regression risk. + +## Goals + +- Detect behavioral regressions and numerical/physics risks before merge. +- Validate unit consistency and coordinate/reference-frame correctness. +- Identify stale-cache risks when `@cached_property` interacts with mutable state. 
+- Check test coverage quality for changed behavior. +- Verify alignment with RocketPy workflow and contributor conventions. + +## Review Priorities + +1. Correctness and safety issues (highest severity). +2. Behavioral regressions and API compatibility. +3. Numerical stability and tolerance correctness. +4. Missing tests or weak assertions. +5. Documentation mismatches affecting users. +6. Workflow violations (test placement, branch/PR conventions, or missing validation evidence). + +## RocketPy-Specific Checks + +- SI units are explicit and consistent. +- Orientation conventions are unambiguous (`tail_to_nose`, `nozzle_to_combustion_chamber`, etc.). +- New/changed simulation logic does not silently invalidate cached values. +- Floating-point assertions use `pytest.approx` where needed. +- New fixtures are wired through `tests/conftest.py` when applicable. +- Test type is appropriate for scope (`unit`, `integration`, `acceptance`) and `all_info()`-style tests + are not misclassified. +- New behavior includes at least one regression-oriented test and relevant edge-case checks. +- For docs-affecting changes, references and paths remain valid and build warnings are addressed. +- Tooling recommendations match current repository setup (prefer Makefile plus `pyproject.toml` + settings when docs are outdated). + +## Validation Expectations + +- Prefer focused test runs first, then broader relevant suites. +- Recommend `make format` and `make lint` when style/lint risks are present. +- Recommend `make build-docs` when `.rst` files or API docs are changed. + +## Output Format + +Provide findings first, ordered by severity. 
+For each finding include: +- Severity: Critical, High, Medium, or Low +- Location: file path and line +- Why it matters: behavioral or physics risk +- Suggested fix: concrete, minimal change + +After findings, include: +- Open questions or assumptions +- Residual risks or testing gaps +- Brief change summary +- Suggested validation commands (only when useful) + +If no findings are identified, state that explicitly and still report residual risks/testing gaps. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index f5366cb3b..382aa15e0 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,221 +1,80 @@ -# GitHub Copilot Instructions for RocketPy - -This file provides instructions for GitHub Copilot when working on the RocketPy codebase. -These guidelines help ensure consistency with the project's coding standards and development practices. - -## Project Overview - -RocketPy is a Python library for 6-DOF rocket trajectory simulation. -It's designed for high-power rocketry applications with focus on accuracy, performance, and ease of use. 
- -## Coding Standards - -### Naming Conventions -- **Use `snake_case` for all new code** - variables, functions, methods, and modules -- **Use descriptive names** - prefer `angle_of_attack` over `a` or `alpha` -- **Class names use PascalCase** - e.g., `SolidMotor`, `Environment`, `Flight` -- **Constants use UPPER_SNAKE_CASE** - e.g., `DEFAULT_GRAVITY`, `EARTH_RADIUS` - -### Code Style -- Follow **PEP 8** guidelines -- Line length: **88 characters** (Black's default) -- Organize imports with **isort** -- Our official formatter is the **ruff frmat** - -### Documentation -- **All public classes, methods, and functions must have docstrings** -- Use **NumPy style docstrings** -- Include **Parameters**, **Returns**, and **Examples** sections -- Document **units** for physical quantities (e.g., "in meters", "in radians") - -### Testing -- Write **unit tests** for all new features using pytest -- Follow **AAA pattern** (Arrange, Act, Assert) -- Use descriptive test names following: `test_methodname_expectedbehaviour` -- Include test docstrings explaining expected behavior -- Use **parameterization** for testing multiple scenarios -- Create pytest fixtures to avoid code repetition - -## Domain-Specific Guidelines - -### Physical Units and Conventions -- **SI units by default** - meters, kilograms, seconds, radians -- **Document coordinate systems** clearly (e.g., "tail_to_nose", "nozzle_to_combustion_chamber") -- **Position parameters** are critical - always document reference points -- Use **descriptive variable names** for physical quantities - -### Rocket Components -- **Motors**: SolidMotor, HybridMotor and LiquidMotor classes are children classes of the Motor class -- **Aerodynamic Surfaces**: They have Drag curves and lift coefficients -- **Parachutes**: Trigger functions, deployment conditions -- **Environment**: Atmospheric models, weather data, wind profiles - -### Mathematical Operations -- Use **numpy arrays** for vectorized operations (this improves 
performance) -- Prefer **scipy functions** for numerical integration and optimization -- **Handle edge cases** in calculations (division by zero, sqrt of negative numbers) -- **Validate input ranges** for physical parameters -- Monte Carlo simulations: sample from `numpy.random` for random number generation and creates several iterations to assess uncertainty in simulations. - -## File Structure and Organization - -### Source Code Organization - -Reminds that `rocketpy` is a Python package served as a library, and its source code is organized into several modules to facilitate maintainability and clarity. The following structure is recommended: - -``` -rocketpy/ -├── core/ # Core simulation classes -├── motors/ # Motor implementations -├── environment/ # Atmospheric and environmental models -├── plots/ # Plotting and visualization -├── tools/ # Utility functions -└── mathutils/ # Mathematical utilities -``` - -Please refer to popular Python packages like `scipy`, `numpy`, and `matplotlib` for inspiration on module organization. - -### Test Organization -``` -tests/ -├── unit/ # Unit tests -├── integration/ # Integration tests -├── acceptance/ # Acceptance tests -└── fixtures/ # Test fixtures organized by component -``` - -### Documentation Structure -``` -docs/ -├── user/ # User guides and tutorials -├── development/ # Development documentation -├── reference/ # API reference -├── examples/ # Flight examples and notebooks -└── technical/ # Technical documentation -``` - -## Common Patterns and Practices - -### Error Handling -- Use **descriptive error messages** with context -- **Validate inputs** at class initialization and method entry -- Raise **appropriate exception types** (ValueError, TypeError, etc.) 
-- Include **suggestions for fixes** in error messages - -### Performance Considerations -- Use **vectorized operations** where possible -- **Cache expensive computations** when appropriate (we frequently use `cached_property`) -- Keep in mind that RocketPy must be fast! - -### Backward Compatibility -- **Avoid breaking changes** in public APIs -- Use **deprecation warnings** before removing features -- **Document code changes** in docstrings and CHANGELOG - -## AI Assistant Guidelines - -### Code Generation -- **Always include docstrings** for new functions and classes -- **Follow existing patterns** in the codebase -- **Consider edge cases** and error conditions - -### Code Review and Suggestions -- **Check for consistency** with existing code style -- **Verify physical units** and coordinate systems -- **Ensure proper error handling** and input validation -- **Suggest performance improvements** when applicable -- **Recommend additional tests** for new functionality - -### Documentation Assistance -- **Use NumPy docstring format** consistently -- **Include practical examples** in docstrings -- **Document physical meanings** of parameters -- **Cross-reference related functions** and classes - -## Testing Guidelines - -### Unit Tests -- **Test individual methods** in isolation -- **Use fixtures** from the appropriate test fixture modules -- **Mock external dependencies** when necessary -- **Test both happy path and error conditions** - -### Integration Tests -- **Test interactions** between components -- **Verify end-to-end workflows** (Environment → Motor → Rocket → Flight) - -### Test Data -- **Use realistic parameters** for rocket simulations -- **Include edge cases** (very small/large rockets, extreme conditions) -- **Test with different coordinate systems** and orientations - -## Project-Specific Considerations - -### User Experience -- **Provide helpful error messages** with context and suggestions -- **Include examples** in docstrings and documentation -- 
**Support common use cases** with reasonable defaults - -## Examples of Good Practices - -### Function Definition -```python -def calculate_drag_force( - velocity, - air_density, - drag_coefficient, - reference_area -): - """Calculate drag force using the standard drag equation. - - Parameters - ---------- - velocity : float - Velocity magnitude in m/s. - air_density : float - Air density in kg/m³. - drag_coefficient : float - Dimensionless drag coefficient. - reference_area : float - Reference area in m². - - Returns - ------- - float - Drag force in N. - - Examples - -------- - >>> drag_force = calculate_drag_force(100, 1.225, 0.5, 0.01) - >>> print(f"Drag force: {drag_force:.2f} N") - """ - if velocity < 0: - raise ValueError("Velocity must be non-negative") - if air_density <= 0: - raise ValueError("Air density must be positive") - if reference_area <= 0: - raise ValueError("Reference area must be positive") - - return 0.5 * air_density * velocity**2 * drag_coefficient * reference_area -``` - -### Test Example -```python -def test_calculate_drag_force_returns_correct_value(): - """Test drag force calculation with known inputs.""" - # Arrange - velocity = 100.0 # m/s - air_density = 1.225 # kg/m³ - drag_coefficient = 0.5 - reference_area = 0.01 # m² - expected_force = 30.625 # N - - # Act - result = calculate_drag_force(velocity, air_density, drag_coefficient, reference_area) - - # Assert - assert abs(result - expected_force) < 1e-6 -``` - - -Remember: RocketPy prioritizes accuracy, performance, and usability. Always consider the physical meaning of calculations and provide clear, well-documented interfaces for users. +# RocketPy Workspace Instructions + +## Code Style +- Use snake_case for variables, functions, methods, and modules. Use descriptive names. +- Use PascalCase for classes and UPPER_SNAKE_CASE for constants. +- Keep lines at 88 characters and follow PEP 8 unless existing code in the target file differs. 
+- Run Ruff as the source of truth for formatting/import organization: + - `make format` + - `make lint` +- Use NumPy-style docstrings for public classes, methods, and functions, including units. +- In case of tooling drift between docs and config, prefer current repository tooling in `Makefile` + and `pyproject.toml`. + +## Architecture +- RocketPy is a modular Python library; keep feature logic in the correct package boundary: + - `rocketpy/simulation`: flight simulation and Monte Carlo orchestration. + - `rocketpy/rocket`, `rocketpy/motors`, `rocketpy/environment`: domain models. + - `rocketpy/mathutils`: numerical primitives and interpolation utilities. + - `rocketpy/plots`, `rocketpy/prints`: output and visualization layers. +- Prefer extending existing classes/patterns over introducing new top-level abstractions. +- Preserve public API stability in `rocketpy/__init__.py` exports. + +## Build and Test +- Use Makefile targets for OS-agnostic workflows: + - `make install` + - `make pytest` + - `make pytest-slow` + - `make coverage` + - `make coverage-report` + - `make build-docs` +- Before finishing code changes, run focused tests first, then broader relevant suites. +- When running Python directly in this workspace, prefer `.venv/Scripts/python.exe`. +- Slow tests are explicitly marked with `@pytest.mark.slow` and are run with `make pytest-slow`. +- For docs changes, check `make build-docs` output and resolve warnings/errors when practical. + +## Development Workflow +- Target pull requests to `develop` by default; `master` is the stable branch. +- Use branch names in `type/description` format, such as: + - `bug/` + - `doc/` + - `enh/` + - `mnt/` + - `tst/` +- Prefer rebasing feature branches on top of `develop` to keep history linear. +- Keep commit and PR titles explicit and prefixed with project acronyms when possible: + - `BUG`, `DOC`, `ENH`, `MNT`, `TST`, `BLD`, `REL`, `REV`, `STY`, `DEV`. + +## Conventions +- SI units are the default. 
Document units and coordinate-system references explicitly. +- Position/reference-frame arguments are critical in this codebase. Be explicit about orientation + (for example, `tail_to_nose`, `nozzle_to_combustion_chamber`). +- Include unit tests for new behavior. Follow AAA structure and clear test names. +- Use fixtures from `tests/fixtures`; if adding a new fixture module, update `tests/conftest.py`. +- Use `pytest.approx` for floating-point checks where appropriate. +- Use `@cached_property` for expensive computations when helpful, and be careful with stale-cache + behavior when underlying mutable state changes. +- Keep behavior backward compatible across the public API exported via `rocketpy/__init__.py`. +- Prefer extending existing module patterns over creating new top-level package structure. + +## Testing Taxonomy +- Unit tests are mandatory for new behavior. +- Unit tests in RocketPy can be sociable (real collaborators allowed) but should still be fast and + method-focused. +- Treat tests as integration tests when they are strongly I/O-oriented or broad across many methods, + including `all_info()` convention cases. +- Acceptance tests represent realistic user/flight scenarios and may compare simulation thresholds to + known flight data. 
+ +## Documentation Links +- Contributor workflow and setup: `docs/development/setting_up.rst` +- Style and naming details: `docs/development/style_guide.rst` +- Testing philosophy and structure: `docs/development/testing.rst` +- API reference conventions: `docs/reference/index.rst` +- Domain/physics background: `docs/technical/index.rst` + +## Scoped Customizations +- Simulation-specific rules: `.github/instructions/simulation-safety.instructions.md` +- Test-authoring rules: `.github/instructions/tests-python.instructions.md` +- RST/Sphinx documentation rules: `.github/instructions/sphinx-docs.instructions.md` +- Specialized review persona: `.github/agents/rocketpy-reviewer.agent.md` diff --git a/.github/instructions/simulation-safety.instructions.md b/.github/instructions/simulation-safety.instructions.md new file mode 100644 index 000000000..cc2af5d27 --- /dev/null +++ b/.github/instructions/simulation-safety.instructions.md @@ -0,0 +1,41 @@ +--- +description: "Use when editing rocketpy/simulation code, including Flight state updates, Monte Carlo orchestration, post-processing, or cached computations. Covers simulation state safety, unit/reference-frame clarity, and regression checks." +name: "Simulation Safety" +applyTo: "rocketpy/simulation/**/*.py" +--- +# Simulation Safety Guidelines + +- Keep simulation logic inside `rocketpy/simulation` and avoid leaking domain behavior that belongs in + `rocketpy/rocket`, `rocketpy/motors`, or `rocketpy/environment`. +- Preserve public API behavior and exported names used by `rocketpy/__init__.py`. +- Prefer extending existing simulation components before creating new abstractions: + - `flight.py`: simulation state, integration flow, and post-processing. + - `monte_carlo.py`: orchestration and statistical execution workflows. + - `flight_data_exporter.py` and `flight_data_importer.py`: persistence and interchange. + - `flight_comparator.py`: comparative analysis outputs. 
+- Be explicit with physical units and reference frames in new parameters, attributes, and docstrings. +- For position/orientation-sensitive behavior, use explicit conventions (for example + `tail_to_nose`, `nozzle_to_combustion_chamber`) and avoid implicit assumptions. +- Treat state mutation carefully when cached values exist. +- If changes can invalidate `@cached_property` values, either avoid post-computation mutation or + explicitly invalidate affected caches in a controlled, documented way. +- Keep numerical behavior deterministic unless stochastic behavior is intentional and documented. +- For Monte Carlo and stochastic code paths, make randomness controllable and reproducible when tests + rely on it. +- Prefer vectorized NumPy operations for hot paths and avoid introducing Python loops in + performance-critical sections without justification. +- Guard against numerical edge cases (zero/near-zero denominators, interpolation limits, and boundary + conditions). +- Do not change default numerical tolerances or integration behavior without documenting motivation and + validating regression impact. +- Add focused regression tests for changed behavior, including edge cases and orientation-dependent + behavior. +- For floating-point expectations, use `pytest.approx` with meaningful tolerances. +- Run focused tests first, then broader relevant tests (`make pytest` and `make pytest-slow` when + applicable). + +See: +- `docs/development/testing.rst` +- `docs/development/style_guide.rst` +- `docs/development/setting_up.rst` +- `docs/technical/index.rst` diff --git a/.github/instructions/sphinx-docs.instructions.md b/.github/instructions/sphinx-docs.instructions.md new file mode 100644 index 000000000..8c24cac53 --- /dev/null +++ b/.github/instructions/sphinx-docs.instructions.md @@ -0,0 +1,32 @@ +--- +description: "Use when writing or editing docs/**/*.rst. 
Covers Sphinx/reStructuredText conventions, cross-references, toctree hygiene, and RocketPy unit/reference-frame documentation requirements." +name: "Sphinx RST Conventions" +applyTo: "docs/**/*.rst" +--- +# Sphinx and RST Guidelines + +- Follow existing heading hierarchy and style in the target document. +- Prefer linking to existing documentation pages instead of duplicating content. +- Use Sphinx cross-references where appropriate (`:class:`, `:func:`, `:mod:`, `:doc:`, `:ref:`). +- Keep API names and module paths consistent with current code exports. +- Document physical units and coordinate/reference-frame conventions explicitly. +- Include concise, practical examples when introducing new user-facing behavior. +- Keep prose clear and technical; avoid marketing language in development/reference docs. +- When adding a new page, update the relevant `toctree` so it appears in navigation. +- Use RocketPy docs build workflow: + - `make build-docs` from repository root for normal validation. + - If stale artifacts appear, clean docs build outputs via `cd docs && make clean`, then rebuild. +- Treat new Sphinx warnings/errors as issues to fix or explicitly call out in review notes. +- Keep `docs/index.rst` section structure coherent with user, development, reference, technical, and + examples navigation. +- Do not edit Sphinx-generated scaffolding files unless explicitly requested: + - `docs/Makefile` + - `docs/make.bat` +- For API docs, ensure references remain aligned with exported/public objects and current module paths. 
+ +See: +- `docs/index.rst` +- `docs/development/build_docs.rst` +- `docs/development/style_guide.rst` +- `docs/reference/index.rst` +- `docs/technical/index.rst` diff --git a/.github/instructions/tests-python.instructions.md b/.github/instructions/tests-python.instructions.md new file mode 100644 index 000000000..1e9626142 --- /dev/null +++ b/.github/instructions/tests-python.instructions.md @@ -0,0 +1,36 @@ +--- +description: "Use when creating or editing pytest files in tests/. Enforces AAA structure, naming conventions, fixture usage, parameterization, slow-test marking, and numerical assertion practices for RocketPy." +name: "RocketPy Pytest Standards" +applyTo: "tests/**/*.py" +--- +# RocketPy Test Authoring Guidelines + +- Unit tests are mandatory for new behavior. +- Follow AAA structure in each test: Arrange, Act, Assert. +- Use descriptive test names matching project convention: + - `test_methodname` + - `test_methodname_stateundertest` + - `test_methodname_expectedbehaviour` +- Include docstrings that clearly state expected behavior and context. +- Prefer parameterization for scenario matrices instead of duplicated tests. +- Classify tests correctly: + - `tests/unit`: fast, method-focused tests (sociable unit tests are acceptable in RocketPy). + - `tests/integration`: broad multi-method/component interactions and strongly I/O-oriented cases. + - `tests/acceptance`: realistic end-user/flight scenarios with threshold-based expectations. +- By RocketPy convention, tests centered on `all_info()` behavior are integration tests. +- Reuse fixtures from `tests/fixtures` whenever possible. +- Keep fixture organization aligned with existing categories under `tests/fixtures` + (environment, flight, motor, rockets, surfaces, units, etc.). +- If you add a new fixture module, update `tests/conftest.py` so fixtures are discoverable. 
+- Keep tests deterministic: set seeds when randomness is involved and avoid unstable external + dependencies unless integration behavior explicitly requires them. +- Use `pytest.approx` for floating-point comparisons with realistic tolerances. +- Mark expensive tests with `@pytest.mark.slow` and ensure they can run under the project slow-test + workflow. +- Include at least one negative or edge-case assertion for new behaviors. +- When adding a bug fix, include a regression test that fails before the fix and passes after it. + +See: +- `docs/development/testing.rst` +- `docs/development/style_guide.rst` +- `docs/development/setting_up.rst` From abc816c40303051e1a136c3331bbb798fc4a9f7a Mon Sep 17 00:00:00 2001 From: MateusStano Date: Sun, 8 Mar 2026 12:35:38 -0300 Subject: [PATCH 27/44] REL: bump version to 1.12 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 88f8d4943..65e1208f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,8 @@ Attention: The newest changes should be on top --> ### Added + +- ENH: Air brakes controller functions now support 8-parameter signature [#854](https://github.com/RocketPy-Team/RocketPy/pull/854) - TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914] (https://github.com/RocketPy-Team/RocketPy/pull/914_ - ENH: Add background map auto download functionality to Monte Carlo plots [#896](https://github.com/RocketPy-Team/RocketPy/pull/896) - MNT: net thrust addition to 3 dof in flight class [#907] (https://github.com/RocketPy-Team/RocketPy/pull/907) From 6e77308ff3aa551155280989ca76ea2637563aa9 Mon Sep 17 00:00:00 2001 From: Khushal Kottaru Date: Wed, 25 Mar 2026 03:29:20 -0700 Subject: [PATCH 28/44] BUG: Add wraparound logic for wind direction in environment plots (#939) * chore: added personal toolkit files * update branch name in workflow * chore: update toolkit files * Fix: add wraparound logic for wind direction and related tests * style: fix 
ruff formatting * Remove unused import Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * refactor: move repetitive logic into helper method * fix: update test logic in test_environment * add changelog entry --------- Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- CHANGELOG.md | 1 + rocketpy/plots/environment_plots.py | 42 +++++++++++++-- .../environment/test_environment.py | 53 +++++++++++++++++++ 3 files changed, 92 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65e1208f0..13b20aa1d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -73,6 +73,7 @@ Attention: The newest changes should be on top --> ### Fixed +- BUG: Add wraparound logic for wind direction in environment plots [#939](https://github.com/RocketPy-Team/RocketPy/pull/939) - BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) - BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) - BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) diff --git a/rocketpy/plots/environment_plots.py b/rocketpy/plots/environment_plots.py index 4b8a91e15..f53cecc1b 100644 --- a/rocketpy/plots/environment_plots.py +++ b/rocketpy/plots/environment_plots.py @@ -33,6 +33,30 @@ def __init__(self, environment): self.grid = np.linspace(environment.elevation, environment.max_expected_height) self.environment = environment + def _break_direction_wraparound(self, directions, altitudes): + """Inserts NaN into direction and altitude arrays at 0°/360° wraparound + points so matplotlib does not draw a horizontal line across the plot. 
+ + Parameters + ---------- + directions : numpy.ndarray + Wind direction values in degrees, dtype float. + altitudes : numpy.ndarray + Altitude values corresponding to each direction, dtype float. + + Returns + ------- + directions : numpy.ndarray + Direction array with NaN inserted at wraparound points. + altitudes : numpy.ndarray + Altitude array with NaN inserted at wraparound points. + """ + WRAP_THRESHOLD = 180 # degrees; half the full circle + wrap_indices = np.where(np.abs(np.diff(directions)) > WRAP_THRESHOLD)[0] + 1 + directions = np.insert(directions, wrap_indices, np.nan) + altitudes = np.insert(altitudes, wrap_indices, np.nan) + return directions, altitudes + def __wind(self, ax): """Adds wind speed and wind direction graphs to the same axis. @@ -55,9 +79,14 @@ def __wind(self, ax): ax.set_xlabel("Wind Speed (m/s)", color="#ff7f0e") ax.tick_params("x", colors="#ff7f0e") axup = ax.twiny() + directions = np.array( + [self.environment.wind_direction(i) for i in self.grid], dtype=float + ) + altitudes = np.array(self.grid, dtype=float) + directions, altitudes = self._break_direction_wraparound(directions, altitudes) axup.plot( - [self.environment.wind_direction(i) for i in self.grid], - self.grid, + directions, + altitudes, color="#1f77b4", label="Wind Direction", ) @@ -311,9 +340,14 @@ def ensemble_member_comparison(self, *, filename=None): ax8 = plt.subplot(324) for i in range(self.environment.num_ensemble_members): self.environment.select_ensemble_member(i) + dirs = np.array( + [self.environment.wind_direction(j) for j in self.grid], dtype=float + ) + alts = np.array(self.grid, dtype=float) + dirs, alts = self._break_direction_wraparound(dirs, alts) ax8.plot( - [self.environment.wind_direction(i) for i in self.grid], - self.grid, + dirs, + alts, label=i, ) ax8.set_ylabel("Height Above Sea Level (m)") diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index 37078b8fd..96ec5ecbc 100644 --- 
a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -92,6 +92,59 @@ def test_standard_atmosphere(mock_show, example_plain_env): # pylint: disable=u assert example_plain_env.prints.print_earth_details() is None +@patch("matplotlib.pyplot.show") +def test_wind_plots_wrapping_direction(mock_show, example_plain_env): # pylint: disable=unused-argument + """Tests that wind direction plots handle 360°→0° wraparound without + drawing a horizontal line across the graph. + + Parameters + ---------- + mock_show : mock + Mock object to replace matplotlib.pyplot.show() method. + example_plain_env : rocketpy.Environment + Example environment object to be tested. + """ + # Set a custom atmosphere where wind direction wraps from ~350° to ~10° + # across the altitude range by choosing wind_u and wind_v to create a + # direction near 350° at low altitude and ~10° at higher altitude. + # wind_direction = (180 + atan2(wind_u, wind_v)) % 360 + # For direction ~350°: need atan2(wind_u, wind_v) ≈ 170° → wind_u>0, wind_v<0 + # For direction ~10°: need atan2(wind_u, wind_v) ≈ -170° → wind_u<0, wind_v<0 + example_plain_env.set_atmospheric_model( + type="custom_atmosphere", + pressure=None, + temperature=300, + wind_u=[(0, 1), (5000, -1)], # changes sign across altitude + wind_v=[(0, -6), (5000, -6)], # stays negative → heading near 350°/10° + ) + # Verify that the wind direction actually wraps through 0°/360° in this + # atmosphere so the test exercises the wraparound code path. + low_dir = example_plain_env.wind_direction(0) + high_dir = example_plain_env.wind_direction(5000) + assert abs(low_dir - high_dir) > 180, ( + "Test setup error: wind direction should cross 0°/360° boundary" + ) + # Verify that the helper inserts NaN breaks into the direction and altitude + # arrays at the wraparound point, which is the core of the fix. 
+ directions = np.array( + [example_plain_env.wind_direction(i) for i in example_plain_env.plots.grid], + dtype=float, + ) + altitudes = np.array(example_plain_env.plots.grid, dtype=float) + directions_broken, altitudes_broken = ( + example_plain_env.plots._break_direction_wraparound(directions, altitudes) + ) + assert np.any(np.isnan(directions_broken)), ( + "Expected NaN breaks in direction array at 0°/360° wraparound" + ) + assert np.any(np.isnan(altitudes_broken)), ( + "Expected NaN breaks in altitude array at 0°/360° wraparound" + ) + # Verify info() and atmospheric_model() plots complete without error + assert example_plain_env.info() is None + assert example_plain_env.plots.atmospheric_model() is None + + @pytest.mark.parametrize( "model_name", [ From 6dbfb47e6283cef1782e57491b7b79120607321d Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 27 Mar 2026 19:28:56 -0300 Subject: [PATCH 29/44] MNT: add numpy import to test_environment.py --- tests/integration/environment/test_environment.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/environment/test_environment.py b/tests/integration/environment/test_environment.py index 96ec5ecbc..5802650dc 100644 --- a/tests/integration/environment/test_environment.py +++ b/tests/integration/environment/test_environment.py @@ -2,6 +2,7 @@ from datetime import date, datetime, timezone from unittest.mock import patch +import numpy as np import pytest From 11cb8edd887533d007c5ca793d499143c08abcf0 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 27 Mar 2026 19:34:28 -0300 Subject: [PATCH 30/44] MNT: rename constant for wraparound threshold in _break_direction_wraparound method --- rocketpy/plots/environment_plots.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketpy/plots/environment_plots.py b/rocketpy/plots/environment_plots.py index f53cecc1b..add5e4efb 100644 --- a/rocketpy/plots/environment_plots.py +++ b/rocketpy/plots/environment_plots.py @@ -51,8 +51,8 @@ def 
_break_direction_wraparound(self, directions, altitudes): altitudes : numpy.ndarray Altitude array with NaN inserted at wraparound points. """ - WRAP_THRESHOLD = 180 # degrees; half the full circle - wrap_indices = np.where(np.abs(np.diff(directions)) > WRAP_THRESHOLD)[0] + 1 + wrap_threshold = 180 # degrees; half the full circle + wrap_indices = np.where(np.abs(np.diff(directions)) > wrap_threshold)[0] + 1 directions = np.insert(directions, wrap_indices, np.nan) altitudes = np.insert(altitudes, wrap_indices, np.nan) return directions, altitudes From 87fd65d6a29986649eb226e956eac55e42c280bb Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:11:03 -0300 Subject: [PATCH 31/44] DOC: add latitude range in docs Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index fb0179c9e..9081ff42a 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -115,7 +115,7 @@ def geodesic_to_lambert_conformal(lat, lon, projection_variable, x_units="m"): Parameters ---------- lat : float - Latitude in degrees. + Latitude in degrees, ranging from -90 to 90 lon : float Longitude in degrees, ranging from -180 to 180. 
projection_variable : netCDF4.Variable From 15edc2732a29db2c792c42755bf824028b22a79f Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:12:35 -0300 Subject: [PATCH 32/44] MNT: remove unnecessary pylint warning Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 9081ff42a..6514de0b9 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -225,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements +def find_longitude_index(longitude, lon_list): """Finds the index of the given longitude in a list of longitudes. Parameters From 015b7a5df0cc06628ef0cdc967e2f2e7533fa09f Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:12:55 -0300 Subject: [PATCH 33/44] MNT: remove unnecessary pylint warning Co-authored-by: Gui-FernandesBR <63590233+Gui-FernandesBR@users.noreply.github.com> --- rocketpy/environment/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 6514de0b9..5335acb36 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -344,7 +344,7 @@ def _coord_value(source, index): return latitude, lat_index -def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements +def find_time_index(datetime_date, time_array): """Finds the index of the given datetime in a netCDF4 time array. 
Parameters From 27dc88ec8b1c282bf49fefd3bb71df89dcf814b9 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Tue, 31 Mar 2026 20:38:51 -0300 Subject: [PATCH 34/44] ENH: address copilot comments --- rocketpy/environment/environment.py | 15 ++-- rocketpy/environment/fetchers.py | 84 ++----------------- rocketpy/environment/weather_model_mapping.py | 28 +++---- 3 files changed, 28 insertions(+), 99 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 39441ecae..6479fa011 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -178,14 +178,14 @@ class Environment: ``Ensemble``. Environment.lat_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. 2x2 matrix for each pressure level of - latitudes corresponding to the vertices of the grid cell which - surrounds the launch site. + Reanalysis and Ensembles. Two-element list ``[x1, x2]`` containing + the latitude coordinates of the grid-cell vertices that bracket the + launch site and are used in bilinear interpolation. Environment.lon_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. 2x2 matrix for each pressure level of - longitudes corresponding to the vertices of the grid cell which - surrounds the launch site. + Reanalysis and Ensembles. Two-element list ``[y1, y2]`` containing + the longitude coordinates of the grid-cell vertices that bracket the + launch site and are used in bilinear interpolation. Environment.lon_index : int Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, Reanalysis and Ensembles. Index to a grid longitude which @@ -223,7 +223,8 @@ class Environment: surrounds the launch site. Environment.time_array : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, - Reanalysis and Ensembles. Array of dates available in the file. + Reanalysis and Ensembles. 
Two-element list with the first and last + values from the dataset time variable in the dataset native units. Environment.height : array Defined if ``netCDF`` or ``OPeNDAP`` file is used, for Forecasts, Reanalysis and Ensembles. List of geometric height corresponding to diff --git a/rocketpy/environment/fetchers.py b/rocketpy/environment/fetchers.py index 589159f1c..5cf03add8 100644 --- a/rocketpy/environment/fetchers.py +++ b/rocketpy/environment/fetchers.py @@ -93,8 +93,8 @@ def fetch_atmospheric_data_from_windy(lat, lon, model): def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest GFS (Global Forecast System) dataset from the NOAA's - GrADS data server using the OpenDAP protocol. + """Fetches the latest GFS (Global Forecast System) dataset from the UCAR + THREDDS data server using the OPeNDAP protocol. Parameters ---------- @@ -128,8 +128,8 @@ def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest NAM (North American Mesoscale) dataset from the NOAA's - GrADS data server using the OpenDAP protocol. + """Fetches the latest NAM (North American Mesoscale) dataset from the UCAR + THREDDS data server using the OPeNDAP protocol. Parameters ---------- @@ -161,8 +161,8 @@ def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest RAP (Rapid Refresh) dataset from the NOAA's GrADS data - server using the OpenDAP protocol. + """Fetches the latest RAP (Rapid Refresh) dataset from the UCAR THREDDS + data server using the OPeNDAP protocol. 
Parameters ---------- @@ -193,78 +193,6 @@ def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): raise RuntimeError("Unable to load latest weather data for RAP through " + file_url) -def fetch_hrrr_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest HRRR (High-Resolution Rapid Refresh) dataset from - the NOAA's GrADS data server using the OpenDAP protocol. - - Parameters - ---------- - max_attempts : int, optional - The maximum number of attempts to fetch the dataset. Default is 10. - base_delay : int, optional - The base delay in seconds between attempts. Default is 2. - - Returns - ------- - netCDF4.Dataset - The HRRR dataset. - - Raises - ------ - RuntimeError - If unable to load the latest weather data for HRRR. - """ - file_url = "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/HRRR/CONUS_2p5km/Best" - attempt_count = 0 - while attempt_count < max_attempts: - try: - return netCDF4.Dataset(file_url) - except OSError: - attempt_count += 1 - time.sleep(base_delay**attempt_count) - - raise RuntimeError( - "Unable to load latest weather data for HRRR through " + file_url - ) - - -def fetch_aigfs_file_return_dataset(max_attempts=10, base_delay=2): - """Fetches the latest AIGFS (Artificial Intelligence GFS) dataset from - the NOAA's GrADS data server using the OpenDAP protocol. - - Parameters - ---------- - max_attempts : int, optional - The maximum number of attempts to fetch the dataset. Default is 10. - base_delay : int, optional - The base delay in seconds between attempts. Default is 2. - - Returns - ------- - netCDF4.Dataset - The AIGFS dataset. - - Raises - ------ - RuntimeError - If unable to load the latest weather data for AIGFS. 
- """ - file_url = ( - "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/AIGFS/Global_0p25deg/Best" - ) - attempt_count = 0 - while attempt_count < max_attempts: - try: - return netCDF4.Dataset(file_url) - except OSError: - attempt_count += 1 - time.sleep(base_delay**attempt_count) - - raise RuntimeError( - "Unable to load latest weather data for AIGFS through " + file_url - ) - - def fetch_hiresw_file_return_dataset(max_attempts=10, base_delay=2): """Fetches the latest HiResW (High-Resolution Window) dataset from the NOAA's GrADS data server using the OpenDAP protocol. diff --git a/rocketpy/environment/weather_model_mapping.py b/rocketpy/environment/weather_model_mapping.py index c490fad9d..b054a35c4 100644 --- a/rocketpy/environment/weather_model_mapping.py +++ b/rocketpy/environment/weather_model_mapping.py @@ -7,21 +7,21 @@ class WeatherModelMapping: Each dictionary in this class maps those canonical keys to the actual variable names in a specific data provider format. - Mapping families - ---------------- - - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current - default mappings used by the latest-model shortcuts and THREDDS-style - datasets. - - ``*_LEGACY`` names represent older NOMADS-style variable naming - conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and - ``vgrdprs``) and are intended for archived or previously downloaded files. + Mapping families + ---------------- + - Base names (for example ``GFS``, ``NAM``, ``RAP``) represent the current + default mappings used by the latest-model shortcuts and THREDDS-style + datasets. + - ``*_LEGACY`` names represent older NOMADS-style variable naming + conventions (for example ``lev``, ``tmpprs``, ``ugrdprs`` and + ``vgrdprs``) and are intended for archived or previously downloaded files. - Notes - ----- - - Mappings can also include optional keys such as ``projection`` for - projected grids and ``ensemble`` for member dimensions. 
- - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and - ``"GFS_LEGACY"`` are equivalent. + Notes + ----- + - Mappings can also include optional keys such as ``projection`` for + projected grids and ``ensemble`` for member dimensions. + - The :meth:`get` method is case-insensitive, so ``"gfs_legacy"`` and + ``"GFS_LEGACY"`` are equivalent. """ GFS = { From 762c4dc6b3c74beee9e222e165ae9f2326860ee8 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Tue, 31 Mar 2026 21:41:13 -0300 Subject: [PATCH 35/44] TST: improve tests --- rocketpy/environment/tools.py | 4 +- tests/unit/environment/test_environment.py | 231 +++++++++++++++++++++ tests/unit/environment/test_fetchers.py | 83 ++++++++ 3 files changed, 316 insertions(+), 2 deletions(-) create mode 100644 tests/unit/environment/test_fetchers.py diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 5335acb36..9081ff42a 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -225,7 +225,7 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): +def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements """Finds the index of the given longitude in a list of longitudes. Parameters @@ -344,7 +344,7 @@ def _coord_value(source, index): return latitude, lat_index -def find_time_index(datetime_date, time_array): +def find_time_index(datetime_date, time_array): # pylint: disable=too-many-statements """Finds the index of the given datetime in a netCDF4 time array. 
Parameters diff --git a/tests/unit/environment/test_environment.py b/tests/unit/environment/test_environment.py index 6d04c089f..eb196e591 100644 --- a/tests/unit/environment/test_environment.py +++ b/tests/unit/environment/test_environment.py @@ -311,3 +311,234 @@ def test_weather_model_mapping_exposes_legacy_aliases(): assert mapping.get("GFS_LEGACY")["temperature"] == "tmpprs" assert mapping.get("gfs_legacy")["temperature"] == "tmpprs" + + +def test_dictionary_matches_dataset_rejects_missing_projection(example_plain_env): + """Reject mapping when projection key is declared but variable is missing.""" + # Arrange + mapping = { + "time": "time", + "latitude": "y", + "longitude": "x", + "projection": "LambertConformal_Projection", + "level": "isobaric", + "temperature": "Temperature_isobaric", + "geopotential_height": "Geopotential_height_isobaric", + "geopotential": None, + "u_wind": "u-component_of_wind_isobaric", + "v_wind": "v-component_of_wind_isobaric", + } + dataset = _DummyDataset( + [ + "time", + "y", + "x", + "isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + # Act + is_compatible = example_plain_env._Environment__dictionary_matches_dataset( + mapping, dataset + ) + + # Assert + assert not is_compatible + + +def test_dictionary_matches_dataset_accepts_geopotential_only(example_plain_env): + """Accept mapping when geopotential exists and geopotential height is absent.""" + # Arrange + mapping = { + "time": "time", + "latitude": "latitude", + "longitude": "longitude", + "level": "level", + "temperature": "t", + "geopotential_height": None, + "geopotential": "z", + "u_wind": "u", + "v_wind": "v", + } + dataset = _DummyDataset( + [ + "time", + "latitude", + "longitude", + "level", + "t", + "z", + "u", + "v", + ] + ) + + # Act + is_compatible = example_plain_env._Environment__dictionary_matches_dataset( + mapping, dataset + ) + + # Assert + assert is_compatible 
+ + +def test_resolve_dictionary_warns_when_falling_back(example_plain_env): + """Emit warning and return a built-in mapping when fallback is required.""" + # Arrange + incompatible_mapping = { + "time": "bad_time", + "latitude": "bad_lat", + "longitude": "bad_lon", + "level": "bad_level", + "temperature": "bad_temp", + "geopotential_height": "bad_height", + "geopotential": None, + "u_wind": "bad_u", + "v_wind": "bad_v", + } + dataset = _DummyDataset( + [ + "time", + "lat", + "lon", + "isobaric", + "Temperature_isobaric", + "Geopotential_height_isobaric", + "u-component_of_wind_isobaric", + "v-component_of_wind_isobaric", + ] + ) + + # Act + with pytest.warns(UserWarning, match="Falling back to built-in mapping"): + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + incompatible_mapping, dataset + ) + + # Assert + assert resolved == example_plain_env._Environment__weather_model_map.get("GFS") + + +def test_resolve_dictionary_returns_original_when_no_compatible_builtin( + example_plain_env, +): + """Return original mapping unchanged when no built-in mapping can match.""" + # Arrange + original_mapping = { + "time": "a", + "latitude": "b", + "longitude": "c", + "level": "d", + "temperature": "e", + "geopotential_height": "f", + "geopotential": None, + "u_wind": "g", + "v_wind": "h", + } + dataset = _DummyDataset(["foo", "bar"]) + + # Act + resolved = example_plain_env._Environment__resolve_dictionary_for_dataset( + original_mapping, dataset + ) + + # Assert + assert resolved is original_mapping + + +@pytest.mark.parametrize( + "model_type,file_name,error_message", + [ + ( + "Forecast", + "hiresw", + "HIRESW latest-model shortcut is currently unavailable", + ), + ( + "Ensemble", + "gefs", + "GEFS latest-model shortcut is currently unavailable", + ), + ], +) +def test_set_atmospheric_model_blocks_deactivated_shortcuts_case_insensitive( + example_plain_env, + model_type, + file_name, + error_message, +): + """Reject deactivated shortcut aliases 
regardless of input string case.""" + # Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(ValueError, match=error_message): + environment.set_atmospheric_model(type=model_type, file=file_name) + + +def test_validate_dictionary_uses_case_insensitive_file_shortcut(example_plain_env): + """Infer built-in mapping from file shortcut even when shortcut is lowercase.""" + # Arrange + environment = example_plain_env + + # Act + mapping = environment._Environment__validate_dictionary("gfs", None) + + # Assert + assert mapping == environment._Environment__weather_model_map.get("GFS") + + +def test_validate_dictionary_raises_type_error_for_invalid_dictionary( + example_plain_env, +): + """Raise TypeError when no valid dictionary can be inferred.""" + # Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(TypeError, match="Please specify a dictionary"): + environment._Environment__validate_dictionary("not_a_model", None) + + +def test_set_atmospheric_model_normalizes_shortcut_case_for_forecast(example_plain_env): + """Normalize shortcut name before lookup and process forecast data.""" + # Arrange + environment = example_plain_env + + environment._Environment__atm_type_file_to_function_map = { + "forecast": { + "GFS": lambda: "fake-dataset", + }, + "ensemble": {}, + } + + called_arguments = {} + + def fake_process_forecast_reanalysis(dataset, dictionary): + called_arguments["dataset"] = dataset + called_arguments["dictionary"] = dictionary + + environment.process_forecast_reanalysis = fake_process_forecast_reanalysis + + # Act + environment.set_atmospheric_model(type="Forecast", file="gfs") + + # Assert + assert called_arguments["dataset"] == "fake-dataset" + assert called_arguments[ + "dictionary" + ] == environment._Environment__weather_model_map.get("GFS") + + +def test_set_atmospheric_model_raises_for_unknown_model_type(example_plain_env): + """Raise ValueError for unknown atmospheric model selector.""" + # 
Arrange + environment = example_plain_env + + # Act / Assert + with pytest.raises(ValueError, match="Unknown model type"): + environment.set_atmospheric_model(type="unknown_type") diff --git a/tests/unit/environment/test_fetchers.py b/tests/unit/environment/test_fetchers.py new file mode 100644 index 000000000..eea06f977 --- /dev/null +++ b/tests/unit/environment/test_fetchers.py @@ -0,0 +1,83 @@ +import pytest + +from rocketpy.environment import fetchers + + +@pytest.mark.parametrize( + "fetcher,expected_url", + [ + ( + fetchers.fetch_gfs_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p25deg/Best", + ), + ( + fetchers.fetch_nam_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/NAM/CONUS_12km/Best", + ), + ( + fetchers.fetch_rap_file_return_dataset, + "https://thredds.ucar.edu/thredds/dodsC/grib/NCEP/RAP/CONUS_13km/Best", + ), + ], +) +def test_fetcher_returns_dataset_on_first_attempt(fetcher, expected_url, monkeypatch): + """Return dataset immediately when the first OPeNDAP attempt succeeds.""" + # Arrange + calls = [] + sentinel_dataset = object() + + def fake_dataset(url): + calls.append(url) + return sentinel_dataset + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", fake_dataset) + + # Act + dataset = fetcher(max_attempts=3, base_delay=2) + + # Assert + assert dataset is sentinel_dataset + assert calls == [expected_url] + + +def test_fetch_gfs_retries_then_succeeds(monkeypatch): + """Retry GFS fetch after OSError and return data once endpoint responds.""" + # Arrange + attempt_counter = {"count": 0} + sleep_calls = [] + + def fake_dataset(_): + attempt_counter["count"] += 1 + if attempt_counter["count"] < 3: + raise OSError("temporary failure") + return "gfs-dataset" + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", fake_dataset) + monkeypatch.setattr(fetchers.time, "sleep", sleep_calls.append) + + # Act + dataset = fetchers.fetch_gfs_file_return_dataset(max_attempts=3, base_delay=2) + + # 
Assert + assert dataset == "gfs-dataset" + assert sleep_calls == [2, 4] + + +def test_fetch_rap_raises_runtime_error_after_max_attempts(monkeypatch): + """Raise RuntimeError when all RAP attempts fail with OSError.""" + # Arrange + sleep_calls = [] + + def always_fails(_): + raise OSError("endpoint down") + + monkeypatch.setattr(fetchers.netCDF4, "Dataset", always_fails) + monkeypatch.setattr(fetchers.time, "sleep", sleep_calls.append) + + # Act / Assert + with pytest.raises( + RuntimeError, match="Unable to load latest weather data for RAP" + ): + fetchers.fetch_rap_file_return_dataset(max_attempts=2, base_delay=2) + + assert sleep_calls == [2, 4] From 06a50347da529b1289f42a51366c19bede046d08 Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:17:08 -0300 Subject: [PATCH 36/44] DOC: correctly link to WeatherModelMapping Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- rocketpy/environment/environment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 6479fa011..026bded3d 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -2033,8 +2033,8 @@ def process_ensemble(self, file, dictionary): # pylint: disable=too-many-locals See also -------- - See the :class:``rocketpy.environment.weather_model_mapping`` for some - dictionary examples. + See the :class:`rocketpy.environment.weather_model_mapping.WeatherModelMapping` + class for some dictionary examples. 
Raises ------ From 18e110c359dd4a68f1fa7cdf83cb988030ba6edc Mon Sep 17 00:00:00 2001 From: Mateus Stano Junqueira <69485049+MateusStano@users.noreply.github.com> Date: Tue, 31 Mar 2026 20:22:31 -0300 Subject: [PATCH 37/44] DOCS: checked todo Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- rocketpy/environment/environment.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/rocketpy/environment/environment.py b/rocketpy/environment/environment.py index 026bded3d..8e379800c 100644 --- a/rocketpy/environment/environment.py +++ b/rocketpy/environment/environment.py @@ -2059,8 +2059,10 @@ class for some dictionary examples. lon_array = data.variables[dictionary["longitude"]] lat_array = data.variables[dictionary["latitude"]] - # Some THREDDS datasets use projected x/y coordinates. - # TODO CHECK THIS I AM NOT SURE????? + # Some THREDDS datasets use projected x/y coordinates. When a + # "projection" variable is provided in the mapping dictionary, convert + # the launch site's geodesic coordinates to the model's projected + # coordinate system before locating the nearest grid cell. if dictionary.get("projection") is not None: projection_variable = data.variables[dictionary["projection"]] x_units = getattr(lon_array, "units", "m") From 059e44d8acc7e3e4ec149755aa6a6dbe31144cfb Mon Sep 17 00:00:00 2001 From: "Mohammed S. 
Al-Mahrouqi" Date: Sat, 28 Mar 2026 21:26:19 -0400 Subject: [PATCH 38/44] ENH: Adaptive Monte Carlo via Convergence Criteria (#922) * ENH: added a new function (simulate_convergence) * DOC: added a cell to show simulate_convergence function usage * TST: integration test for simulate_convergence * DOC: updated changelog for this PR * ENH: ran black to lint intg test file * new fixes thx to copilot comments * linted rocketpy/simulation/monte_carlo.py --------- Co-authored-by: Malmahrouqi3 --- CHANGELOG.md | 3 +- .../monte_carlo_class_usage.ipynb | 22 ++++++ rocketpy/simulation/monte_carlo.py | 67 +++++++++++++++++++ .../simulation/test_monte_carlo.py | 27 ++++++++ 4 files changed, 118 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 13b20aa1d..720a4cd17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ Attention: The newest changes should be on top --> ### Added -- +- ENH: Adaptive Monte Carlo via Convergence Criteria [#922](https://github.com/RocketPy-Team/RocketPy/pull/922) +- TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914](https://github.com/RocketPy-Team/RocketPy/pull/914) ### Changed diff --git a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb index 2fb46fa86..8181c03ba 100644 --- a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb +++ b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb @@ -800,6 +800,28 @@ ")" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Alternatively, we can target an attribute using the method `MonteCarlo.simulate_convergence()` such that when the tolerance is met, the flight simulations would terminate early." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "test_dispersion.simulate_convergence(\n", + " target_attribute=\"apogee_time\",\n", + " target_confidence=0.95,\n", + " tolerance=0.5, # in seconds\n", + " max_simulations=1000,\n", + " batch_size=50,\n", + ")" + ] + }, { "attachments": {}, "cell_type": "markdown", diff --git a/rocketpy/simulation/monte_carlo.py b/rocketpy/simulation/monte_carlo.py index e10789a7d..42a566b7b 100644 --- a/rocketpy/simulation/monte_carlo.py +++ b/rocketpy/simulation/monte_carlo.py @@ -525,6 +525,73 @@ def estimate_confidence_interval( return res.confidence_interval + def simulate_convergence( + self, + target_attribute="apogee_time", + target_confidence=0.95, + tolerance=0.5, + max_simulations=1000, + batch_size=50, + parallel=False, + n_workers=None, + ): + """Run Monte Carlo simulations in batches until the confidence interval + width converges within the specified tolerance or the maximum number of + simulations is reached. + + Parameters + ---------- + target_attribute : str + The target attribute to track its convergence (e.g., "apogee", "apogee_time", etc.). + target_confidence : float, optional + The confidence level for the interval (between 0 and 1). Default is 0.95. + tolerance : float, optional + The desired width of the confidence interval in seconds, meters, or other units. Default is 0.5. + max_simulations : int, optional + The maximum number of simulations to run to avoid infinite loops. Default is 1000. + batch_size : int, optional + The number of simulations to run in each batch. Default is 50. + parallel : bool, optional + Whether to run simulations in parallel. Default is False. + n_workers : int, optional + The number of worker processes to use if running in parallel. Default is None. + + Returns + ------- + confidence_interval_history : list of float + History of confidence interval widths, one value per batch of simulations. 
+ The last element corresponds to the width when the simulation stopped for + either meeting the tolerance or reaching the maximum number of simulations. + """ + + self.import_outputs(self.filename.with_suffix(".outputs.txt")) + confidence_interval_history = [] + + while self.num_of_loaded_sims < max_simulations: + total_sims = min(self.num_of_loaded_sims + batch_size, max_simulations) + + self.simulate( + number_of_simulations=total_sims, + append=True, + include_function_data=False, + parallel=parallel, + n_workers=n_workers, + ) + + self.import_outputs(self.filename.with_suffix(".outputs.txt")) + + ci = self.estimate_confidence_interval( + attribute=target_attribute, + confidence_level=target_confidence, + ) + + confidence_interval_history.append(float(ci.high - ci.low)) + + if float(ci.high - ci.low) <= tolerance: + break + + return confidence_interval_history + def __evaluate_flight_inputs(self, sim_idx): """Evaluates the inputs of a single flight simulation. diff --git a/tests/integration/simulation/test_monte_carlo.py b/tests/integration/simulation/test_monte_carlo.py index 4b1b82392..98af2431d 100644 --- a/tests/integration/simulation/test_monte_carlo.py +++ b/tests/integration/simulation/test_monte_carlo.py @@ -236,3 +236,30 @@ def invalid_data_collector(flight): monte_carlo_calisto.simulate(number_of_simulations=10, append=False) finally: _post_test_file_cleanup() + + +@pytest.mark.slow +def test_monte_carlo_simulate_convergence(monte_carlo_calisto): + """Tests the simulate_convergence method of the MonteCarlo class. + + Parameters + ---------- + monte_carlo_calisto : MonteCarlo + The MonteCarlo object, this is a pytest fixture. 
+ """ + try: + ci_history = monte_carlo_calisto.simulate_convergence( + target_attribute="apogee", + target_confidence=0.95, + tolerance=5.0, + max_simulations=20, + batch_size=5, + parallel=False, + ) + + assert isinstance(ci_history, list) + assert all(isinstance(width, float) for width in ci_history) + assert len(ci_history) >= 1 + assert monte_carlo_calisto.num_of_loaded_sims <= 20 + finally: + _post_test_file_cleanup() From 47ad8540dd98c884ddda437e7cc6e09d168b7e35 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 3 Apr 2026 20:46:09 -0300 Subject: [PATCH 39/44] DEV: remove unwanted changes from develop --- .github/agents/rocketpy-reviewer.agent.md | 62 ---- .github/copilot-instructions.md | 301 +++++++++++++----- .../simulation-safety.instructions.md | 41 --- .../instructions/sphinx-docs.instructions.md | 32 -- .../instructions/tests-python.instructions.md | 36 --- CHANGELOG.md | 15 +- .../monte_carlo_class_usage.ipynb | 22 -- rocketpy/simulation/monte_carlo.py | 67 ---- .../simulation/test_monte_carlo.py | 29 +- 9 files changed, 230 insertions(+), 375 deletions(-) delete mode 100644 .github/agents/rocketpy-reviewer.agent.md delete mode 100644 .github/instructions/simulation-safety.instructions.md delete mode 100644 .github/instructions/sphinx-docs.instructions.md delete mode 100644 .github/instructions/tests-python.instructions.md diff --git a/.github/agents/rocketpy-reviewer.agent.md b/.github/agents/rocketpy-reviewer.agent.md deleted file mode 100644 index be1b64b13..000000000 --- a/.github/agents/rocketpy-reviewer.agent.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -description: "Physics-safe RocketPy code review agent. Use for pull request review, unit consistency checks, coordinate-frame validation, cached-property risk detection, and regression-focused test-gap analysis." 
-name: "RocketPy Reviewer" -tools: [read, search, execute] -argument-hint: "Review these changes for physics correctness and regression risk: " -user-invocable: true ---- -You are a RocketPy-focused reviewer for physics safety and regression risk. - -## Goals - -- Detect behavioral regressions and numerical/physics risks before merge. -- Validate unit consistency and coordinate/reference-frame correctness. -- Identify stale-cache risks when `@cached_property` interacts with mutable state. -- Check test coverage quality for changed behavior. -- Verify alignment with RocketPy workflow and contributor conventions. - -## Review Priorities - -1. Correctness and safety issues (highest severity). -2. Behavioral regressions and API compatibility. -3. Numerical stability and tolerance correctness. -4. Missing tests or weak assertions. -5. Documentation mismatches affecting users. -6. Workflow violations (test placement, branch/PR conventions, or missing validation evidence). - -## RocketPy-Specific Checks - -- SI units are explicit and consistent. -- Orientation conventions are unambiguous (`tail_to_nose`, `nozzle_to_combustion_chamber`, etc.). -- New/changed simulation logic does not silently invalidate cached values. -- Floating-point assertions use `pytest.approx` where needed. -- New fixtures are wired through `tests/conftest.py` when applicable. -- Test type is appropriate for scope (`unit`, `integration`, `acceptance`) and `all_info()`-style tests - are not misclassified. -- New behavior includes at least one regression-oriented test and relevant edge-case checks. -- For docs-affecting changes, references and paths remain valid and build warnings are addressed. -- Tooling recommendations match current repository setup (prefer Makefile plus `pyproject.toml` - settings when docs are outdated). - -## Validation Expectations - -- Prefer focused test runs first, then broader relevant suites. -- Recommend `make format` and `make lint` when style/lint risks are present. 
-- Recommend `make build-docs` when `.rst` files or API docs are changed. - -## Output Format - -Provide findings first, ordered by severity. -For each finding include: -- Severity: Critical, High, Medium, or Low -- Location: file path and line -- Why it matters: behavioral or physics risk -- Suggested fix: concrete, minimal change - -After findings, include: -- Open questions or assumptions -- Residual risks or testing gaps -- Brief change summary -- Suggested validation commands (only when useful) - -If no findings are identified, state that explicitly and still report residual risks/testing gaps. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 382aa15e0..f5366cb3b 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,80 +1,221 @@ -# RocketPy Workspace Instructions - -## Code Style -- Use snake_case for variables, functions, methods, and modules. Use descriptive names. -- Use PascalCase for classes and UPPER_SNAKE_CASE for constants. -- Keep lines at 88 characters and follow PEP 8 unless existing code in the target file differs. -- Run Ruff as the source of truth for formatting/import organization: - - `make format` - - `make lint` -- Use NumPy-style docstrings for public classes, methods, and functions, including units. -- In case of tooling drift between docs and config, prefer current repository tooling in `Makefile` - and `pyproject.toml`. - -## Architecture -- RocketPy is a modular Python library; keep feature logic in the correct package boundary: - - `rocketpy/simulation`: flight simulation and Monte Carlo orchestration. - - `rocketpy/rocket`, `rocketpy/motors`, `rocketpy/environment`: domain models. - - `rocketpy/mathutils`: numerical primitives and interpolation utilities. - - `rocketpy/plots`, `rocketpy/prints`: output and visualization layers. -- Prefer extending existing classes/patterns over introducing new top-level abstractions. 
-- Preserve public API stability in `rocketpy/__init__.py` exports. - -## Build and Test -- Use Makefile targets for OS-agnostic workflows: - - `make install` - - `make pytest` - - `make pytest-slow` - - `make coverage` - - `make coverage-report` - - `make build-docs` -- Before finishing code changes, run focused tests first, then broader relevant suites. -- When running Python directly in this workspace, prefer `.venv/Scripts/python.exe`. -- Slow tests are explicitly marked with `@pytest.mark.slow` and are run with `make pytest-slow`. -- For docs changes, check `make build-docs` output and resolve warnings/errors when practical. - -## Development Workflow -- Target pull requests to `develop` by default; `master` is the stable branch. -- Use branch names in `type/description` format, such as: - - `bug/` - - `doc/` - - `enh/` - - `mnt/` - - `tst/` -- Prefer rebasing feature branches on top of `develop` to keep history linear. -- Keep commit and PR titles explicit and prefixed with project acronyms when possible: - - `BUG`, `DOC`, `ENH`, `MNT`, `TST`, `BLD`, `REL`, `REV`, `STY`, `DEV`. - -## Conventions -- SI units are the default. Document units and coordinate-system references explicitly. -- Position/reference-frame arguments are critical in this codebase. Be explicit about orientation - (for example, `tail_to_nose`, `nozzle_to_combustion_chamber`). -- Include unit tests for new behavior. Follow AAA structure and clear test names. -- Use fixtures from `tests/fixtures`; if adding a new fixture module, update `tests/conftest.py`. -- Use `pytest.approx` for floating-point checks where appropriate. -- Use `@cached_property` for expensive computations when helpful, and be careful with stale-cache - behavior when underlying mutable state changes. -- Keep behavior backward compatible across the public API exported via `rocketpy/__init__.py`. -- Prefer extending existing module patterns over creating new top-level package structure. 
- -## Testing Taxonomy -- Unit tests are mandatory for new behavior. -- Unit tests in RocketPy can be sociable (real collaborators allowed) but should still be fast and - method-focused. -- Treat tests as integration tests when they are strongly I/O-oriented or broad across many methods, - including `all_info()` convention cases. -- Acceptance tests represent realistic user/flight scenarios and may compare simulation thresholds to - known flight data. - -## Documentation Links -- Contributor workflow and setup: `docs/development/setting_up.rst` -- Style and naming details: `docs/development/style_guide.rst` -- Testing philosophy and structure: `docs/development/testing.rst` -- API reference conventions: `docs/reference/index.rst` -- Domain/physics background: `docs/technical/index.rst` - -## Scoped Customizations -- Simulation-specific rules: `.github/instructions/simulation-safety.instructions.md` -- Test-authoring rules: `.github/instructions/tests-python.instructions.md` -- RST/Sphinx documentation rules: `.github/instructions/sphinx-docs.instructions.md` -- Specialized review persona: `.github/agents/rocketpy-reviewer.agent.md` +# GitHub Copilot Instructions for RocketPy + +This file provides instructions for GitHub Copilot when working on the RocketPy codebase. +These guidelines help ensure consistency with the project's coding standards and development practices. + +## Project Overview + +RocketPy is a Python library for 6-DOF rocket trajectory simulation. +It's designed for high-power rocketry applications with focus on accuracy, performance, and ease of use. 
+ +## Coding Standards + +### Naming Conventions +- **Use `snake_case` for all new code** - variables, functions, methods, and modules +- **Use descriptive names** - prefer `angle_of_attack` over `a` or `alpha` +- **Class names use PascalCase** - e.g., `SolidMotor`, `Environment`, `Flight` +- **Constants use UPPER_SNAKE_CASE** - e.g., `DEFAULT_GRAVITY`, `EARTH_RADIUS` + +### Code Style +- Follow **PEP 8** guidelines +- Line length: **88 characters** (Black's default) +- Organize imports with **isort** +- Our official formatter is **ruff format** + +### Documentation +- **All public classes, methods, and functions must have docstrings** +- Use **NumPy style docstrings** +- Include **Parameters**, **Returns**, and **Examples** sections +- Document **units** for physical quantities (e.g., "in meters", "in radians") + +### Testing +- Write **unit tests** for all new features using pytest +- Follow **AAA pattern** (Arrange, Act, Assert) +- Use descriptive test names following: `test_methodname_expectedbehaviour` +- Include test docstrings explaining expected behavior +- Use **parameterization** for testing multiple scenarios +- Create pytest fixtures to avoid code repetition + +## Domain-Specific Guidelines + +### Physical Units and Conventions +- **SI units by default** - meters, kilograms, seconds, radians +- **Document coordinate systems** clearly (e.g., "tail_to_nose", "nozzle_to_combustion_chamber") +- **Position parameters** are critical - always document reference points +- Use **descriptive variable names** for physical quantities + +### Rocket Components +- **Motors**: SolidMotor, HybridMotor and LiquidMotor classes are children classes of the Motor class +- **Aerodynamic Surfaces**: They have Drag curves and lift coefficients +- **Parachutes**: Trigger functions, deployment conditions +- **Environment**: Atmospheric models, weather data, wind profiles + +### Mathematical Operations +- Use **numpy arrays** for vectorized operations (this improves
performance) +- Prefer **scipy functions** for numerical integration and optimization +- **Handle edge cases** in calculations (division by zero, sqrt of negative numbers) +- **Validate input ranges** for physical parameters +- Monte Carlo simulations: sample from `numpy.random` for random number generation and creates several iterations to assess uncertainty in simulations. + +## File Structure and Organization + +### Source Code Organization + +Reminds that `rocketpy` is a Python package served as a library, and its source code is organized into several modules to facilitate maintainability and clarity. The following structure is recommended: + +``` +rocketpy/ +├── core/ # Core simulation classes +├── motors/ # Motor implementations +├── environment/ # Atmospheric and environmental models +├── plots/ # Plotting and visualization +├── tools/ # Utility functions +└── mathutils/ # Mathematical utilities +``` + +Please refer to popular Python packages like `scipy`, `numpy`, and `matplotlib` for inspiration on module organization. + +### Test Organization +``` +tests/ +├── unit/ # Unit tests +├── integration/ # Integration tests +├── acceptance/ # Acceptance tests +└── fixtures/ # Test fixtures organized by component +``` + +### Documentation Structure +``` +docs/ +├── user/ # User guides and tutorials +├── development/ # Development documentation +├── reference/ # API reference +├── examples/ # Flight examples and notebooks +└── technical/ # Technical documentation +``` + +## Common Patterns and Practices + +### Error Handling +- Use **descriptive error messages** with context +- **Validate inputs** at class initialization and method entry +- Raise **appropriate exception types** (ValueError, TypeError, etc.) 
+- Include **suggestions for fixes** in error messages + +### Performance Considerations +- Use **vectorized operations** where possible +- **Cache expensive computations** when appropriate (we frequently use `cached_property`) +- Keep in mind that RocketPy must be fast! + +### Backward Compatibility +- **Avoid breaking changes** in public APIs +- Use **deprecation warnings** before removing features +- **Document code changes** in docstrings and CHANGELOG + +## AI Assistant Guidelines + +### Code Generation +- **Always include docstrings** for new functions and classes +- **Follow existing patterns** in the codebase +- **Consider edge cases** and error conditions + +### Code Review and Suggestions +- **Check for consistency** with existing code style +- **Verify physical units** and coordinate systems +- **Ensure proper error handling** and input validation +- **Suggest performance improvements** when applicable +- **Recommend additional tests** for new functionality + +### Documentation Assistance +- **Use NumPy docstring format** consistently +- **Include practical examples** in docstrings +- **Document physical meanings** of parameters +- **Cross-reference related functions** and classes + +## Testing Guidelines + +### Unit Tests +- **Test individual methods** in isolation +- **Use fixtures** from the appropriate test fixture modules +- **Mock external dependencies** when necessary +- **Test both happy path and error conditions** + +### Integration Tests +- **Test interactions** between components +- **Verify end-to-end workflows** (Environment → Motor → Rocket → Flight) + +### Test Data +- **Use realistic parameters** for rocket simulations +- **Include edge cases** (very small/large rockets, extreme conditions) +- **Test with different coordinate systems** and orientations + +## Project-Specific Considerations + +### User Experience +- **Provide helpful error messages** with context and suggestions +- **Include examples** in docstrings and documentation +- 
**Support common use cases** with reasonable defaults + +## Examples of Good Practices + +### Function Definition +```python +def calculate_drag_force( + velocity, + air_density, + drag_coefficient, + reference_area +): + """Calculate drag force using the standard drag equation. + + Parameters + ---------- + velocity : float + Velocity magnitude in m/s. + air_density : float + Air density in kg/m³. + drag_coefficient : float + Dimensionless drag coefficient. + reference_area : float + Reference area in m². + + Returns + ------- + float + Drag force in N. + + Examples + -------- + >>> drag_force = calculate_drag_force(100, 1.225, 0.5, 0.01) + >>> print(f"Drag force: {drag_force:.2f} N") + """ + if velocity < 0: + raise ValueError("Velocity must be non-negative") + if air_density <= 0: + raise ValueError("Air density must be positive") + if reference_area <= 0: + raise ValueError("Reference area must be positive") + + return 0.5 * air_density * velocity**2 * drag_coefficient * reference_area +``` + +### Test Example +```python +def test_calculate_drag_force_returns_correct_value(): + """Test drag force calculation with known inputs.""" + # Arrange + velocity = 100.0 # m/s + air_density = 1.225 # kg/m³ + drag_coefficient = 0.5 + reference_area = 0.01 # m² + expected_force = 30.625 # N + + # Act + result = calculate_drag_force(velocity, air_density, drag_coefficient, reference_area) + + # Assert + assert abs(result - expected_force) < 1e-6 +``` + + +Remember: RocketPy prioritizes accuracy, performance, and usability. Always consider the physical meaning of calculations and provide clear, well-documented interfaces for users. 
diff --git a/.github/instructions/simulation-safety.instructions.md b/.github/instructions/simulation-safety.instructions.md deleted file mode 100644 index cc2af5d27..000000000 --- a/.github/instructions/simulation-safety.instructions.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -description: "Use when editing rocketpy/simulation code, including Flight state updates, Monte Carlo orchestration, post-processing, or cached computations. Covers simulation state safety, unit/reference-frame clarity, and regression checks." -name: "Simulation Safety" -applyTo: "rocketpy/simulation/**/*.py" ---- -# Simulation Safety Guidelines - -- Keep simulation logic inside `rocketpy/simulation` and avoid leaking domain behavior that belongs in - `rocketpy/rocket`, `rocketpy/motors`, or `rocketpy/environment`. -- Preserve public API behavior and exported names used by `rocketpy/__init__.py`. -- Prefer extending existing simulation components before creating new abstractions: - - `flight.py`: simulation state, integration flow, and post-processing. - - `monte_carlo.py`: orchestration and statistical execution workflows. - - `flight_data_exporter.py` and `flight_data_importer.py`: persistence and interchange. - - `flight_comparator.py`: comparative analysis outputs. -- Be explicit with physical units and reference frames in new parameters, attributes, and docstrings. -- For position/orientation-sensitive behavior, use explicit conventions (for example - `tail_to_nose`, `nozzle_to_combustion_chamber`) and avoid implicit assumptions. -- Treat state mutation carefully when cached values exist. -- If changes can invalidate `@cached_property` values, either avoid post-computation mutation or - explicitly invalidate affected caches in a controlled, documented way. -- Keep numerical behavior deterministic unless stochastic behavior is intentional and documented. -- For Monte Carlo and stochastic code paths, make randomness controllable and reproducible when tests - rely on it. 
-- Prefer vectorized NumPy operations for hot paths and avoid introducing Python loops in - performance-critical sections without justification. -- Guard against numerical edge cases (zero/near-zero denominators, interpolation limits, and boundary - conditions). -- Do not change default numerical tolerances or integration behavior without documenting motivation and - validating regression impact. -- Add focused regression tests for changed behavior, including edge cases and orientation-dependent - behavior. -- For floating-point expectations, use `pytest.approx` with meaningful tolerances. -- Run focused tests first, then broader relevant tests (`make pytest` and `make pytest-slow` when - applicable). - -See: -- `docs/development/testing.rst` -- `docs/development/style_guide.rst` -- `docs/development/setting_up.rst` -- `docs/technical/index.rst` diff --git a/.github/instructions/sphinx-docs.instructions.md b/.github/instructions/sphinx-docs.instructions.md deleted file mode 100644 index 8c24cac53..000000000 --- a/.github/instructions/sphinx-docs.instructions.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -description: "Use when writing or editing docs/**/*.rst. Covers Sphinx/reStructuredText conventions, cross-references, toctree hygiene, and RocketPy unit/reference-frame documentation requirements." -name: "Sphinx RST Conventions" -applyTo: "docs/**/*.rst" ---- -# Sphinx and RST Guidelines - -- Follow existing heading hierarchy and style in the target document. -- Prefer linking to existing documentation pages instead of duplicating content. -- Use Sphinx cross-references where appropriate (`:class:`, `:func:`, `:mod:`, `:doc:`, `:ref:`). -- Keep API names and module paths consistent with current code exports. -- Document physical units and coordinate/reference-frame conventions explicitly. -- Include concise, practical examples when introducing new user-facing behavior. -- Keep prose clear and technical; avoid marketing language in development/reference docs. 
-- When adding a new page, update the relevant `toctree` so it appears in navigation. -- Use RocketPy docs build workflow: - - `make build-docs` from repository root for normal validation. - - If stale artifacts appear, clean docs build outputs via `cd docs && make clean`, then rebuild. -- Treat new Sphinx warnings/errors as issues to fix or explicitly call out in review notes. -- Keep `docs/index.rst` section structure coherent with user, development, reference, technical, and - examples navigation. -- Do not edit Sphinx-generated scaffolding files unless explicitly requested: - - `docs/Makefile` - - `docs/make.bat` -- For API docs, ensure references remain aligned with exported/public objects and current module paths. - -See: -- `docs/index.rst` -- `docs/development/build_docs.rst` -- `docs/development/style_guide.rst` -- `docs/reference/index.rst` -- `docs/technical/index.rst` diff --git a/.github/instructions/tests-python.instructions.md b/.github/instructions/tests-python.instructions.md deleted file mode 100644 index 1e9626142..000000000 --- a/.github/instructions/tests-python.instructions.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -description: "Use when creating or editing pytest files in tests/. Enforces AAA structure, naming conventions, fixture usage, parameterization, slow-test marking, and numerical assertion practices for RocketPy." -name: "RocketPy Pytest Standards" -applyTo: "tests/**/*.py" ---- -# RocketPy Test Authoring Guidelines - -- Unit tests are mandatory for new behavior. -- Follow AAA structure in each test: Arrange, Act, Assert. -- Use descriptive test names matching project convention: - - `test_methodname` - - `test_methodname_stateundertest` - - `test_methodname_expectedbehaviour` -- Include docstrings that clearly state expected behavior and context. -- Prefer parameterization for scenario matrices instead of duplicated tests. 
-- Classify tests correctly: - - `tests/unit`: fast, method-focused tests (sociable unit tests are acceptable in RocketPy). - - `tests/integration`: broad multi-method/component interactions and strongly I/O-oriented cases. - - `tests/acceptance`: realistic end-user/flight scenarios with threshold-based expectations. -- By RocketPy convention, tests centered on `all_info()` behavior are integration tests. -- Reuse fixtures from `tests/fixtures` whenever possible. -- Keep fixture organization aligned with existing categories under `tests/fixtures` - (environment, flight, motor, rockets, surfaces, units, etc.). -- If you add a new fixture module, update `tests/conftest.py` so fixtures are discoverable. -- Keep tests deterministic: set seeds when randomness is involved and avoid unstable external - dependencies unless integration behavior explicitly requires them. -- Use `pytest.approx` for floating-point comparisons with realistic tolerances. -- Mark expensive tests with `@pytest.mark.slow` and ensure they can run under the project slow-test - workflow. -- Include at least one negative or edge-case assertion for new behaviors. -- When adding a bug fix, include a regression test that fails before the fix and passes after it. 
- -See: -- `docs/development/testing.rst` -- `docs/development/style_guide.rst` -- `docs/development/setting_up.rst` diff --git a/CHANGELOG.md b/CHANGELOG.md index 720a4cd17..00f9ef5a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,8 +32,7 @@ Attention: The newest changes should be on top --> ### Added -- ENH: Adaptive Monte Carlo via Convergence Criteria [#922](https://github.com/RocketPy-Team/RocketPy/pull/922) -- TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914](https://github.com/RocketPy-Team/RocketPy/pull/914) +- ### Changed @@ -41,14 +40,18 @@ Attention: The newest changes should be on top --> ### Fixed -- +- + +## [v1.12.1] - 2026-04-03 + +### Fixed + +- BUG: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) ## [v1.12.0] - 2026-03-08 ### Added - -- ENH: Air brakes controller functions now support 8-parameter signature [#854](https://github.com/RocketPy-Team/RocketPy/pull/854) - TST: Add acceptance tests for 3DOF flight simulation based on Bella Lui rocket [#914] (https://github.com/RocketPy-Team/RocketPy/pull/914_ - ENH: Add background map auto download functionality to Monte Carlo plots [#896](https://github.com/RocketPy-Team/RocketPy/pull/896) - MNT: net thrust addition to 3 dof in flight class [#907] (https://github.com/RocketPy-Team/RocketPy/pull/907) @@ -74,11 +77,9 @@ Attention: The newest changes should be on top --> ### Fixed -- BUG: Add wraparound logic for wind direction in environment plots [#939](https://github.com/RocketPy-Team/RocketPy/pull/939) - BUG: Restore `Rocket.power_off_drag` and `Rocket.power_on_drag` as `Function` objects while preserving raw inputs in `power_off_drag_input` and `power_on_drag_input` [#941](https://github.com/RocketPy-Team/RocketPy/pull/941) - BUG: Add explicit timeouts to ThrustCurve API requests [#935](https://github.com/RocketPy-Team/RocketPy/pull/935) - BUG: Fix hard-coded radius value for parachute added mass calculation 
[#889](https://github.com/RocketPy-Team/RocketPy/pull/889) -- BUG: Fix hard-coded radius value for parachute added mass calculation [#889](https://github.com/RocketPy-Team/RocketPy/pull/889) - DOC: Fix documentation build [#908](https://github.com/RocketPy-Team/RocketPy/pull/908) - BUG: energy_data plot not working for 3 dof sims [[#906](https://github.com/RocketPy-Team/RocketPy/issues/906)] - BUG: Fix CSV column header spacing in FlightDataExporter [#864](https://github.com/RocketPy-Team/RocketPy/issues/864) diff --git a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb index 8181c03ba..2fb46fa86 100644 --- a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb +++ b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb @@ -800,28 +800,6 @@ ")" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Alternatively, we can target an attribute using the method `MonteCarlo.simulate_convergence()` such that when the tolerance is met, the flight simulations would terminate early." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "test_dispersion.simulate_convergence(\n", - " target_attribute=\"apogee_time\",\n", - " target_confidence=0.95,\n", - " tolerance=0.5, # in seconds\n", - " max_simulations=1000,\n", - " batch_size=50,\n", - ")" - ] - }, { "attachments": {}, "cell_type": "markdown", diff --git a/rocketpy/simulation/monte_carlo.py b/rocketpy/simulation/monte_carlo.py index 42a566b7b..e10789a7d 100644 --- a/rocketpy/simulation/monte_carlo.py +++ b/rocketpy/simulation/monte_carlo.py @@ -525,73 +525,6 @@ def estimate_confidence_interval( return res.confidence_interval - def simulate_convergence( - self, - target_attribute="apogee_time", - target_confidence=0.95, - tolerance=0.5, - max_simulations=1000, - batch_size=50, - parallel=False, - n_workers=None, - ): - """Run Monte Carlo simulations in batches until the confidence interval - width converges within the specified tolerance or the maximum number of - simulations is reached. - - Parameters - ---------- - target_attribute : str - The target attribute to track its convergence (e.g., "apogee", "apogee_time", etc.). - target_confidence : float, optional - The confidence level for the interval (between 0 and 1). Default is 0.95. - tolerance : float, optional - The desired width of the confidence interval in seconds, meters, or other units. Default is 0.5. - max_simulations : int, optional - The maximum number of simulations to run to avoid infinite loops. Default is 1000. - batch_size : int, optional - The number of simulations to run in each batch. Default is 50. - parallel : bool, optional - Whether to run simulations in parallel. Default is False. - n_workers : int, optional - The number of worker processes to use if running in parallel. Default is None. - - Returns - ------- - confidence_interval_history : list of float - History of confidence interval widths, one value per batch of simulations. 
- The last element corresponds to the width when the simulation stopped for - either meeting the tolerance or reaching the maximum number of simulations. - """ - - self.import_outputs(self.filename.with_suffix(".outputs.txt")) - confidence_interval_history = [] - - while self.num_of_loaded_sims < max_simulations: - total_sims = min(self.num_of_loaded_sims + batch_size, max_simulations) - - self.simulate( - number_of_simulations=total_sims, - append=True, - include_function_data=False, - parallel=parallel, - n_workers=n_workers, - ) - - self.import_outputs(self.filename.with_suffix(".outputs.txt")) - - ci = self.estimate_confidence_interval( - attribute=target_attribute, - confidence_level=target_confidence, - ) - - confidence_interval_history.append(float(ci.high - ci.low)) - - if float(ci.high - ci.low) <= tolerance: - break - - return confidence_interval_history - def __evaluate_flight_inputs(self, sim_idx): """Evaluates the inputs of a single flight simulation. diff --git a/tests/integration/simulation/test_monte_carlo.py b/tests/integration/simulation/test_monte_carlo.py index 98af2431d..968cbadc5 100644 --- a/tests/integration/simulation/test_monte_carlo.py +++ b/tests/integration/simulation/test_monte_carlo.py @@ -235,31 +235,4 @@ def invalid_data_collector(flight): with pytest.raises(ValueError): monte_carlo_calisto.simulate(number_of_simulations=10, append=False) finally: - _post_test_file_cleanup() - - -@pytest.mark.slow -def test_monte_carlo_simulate_convergence(monte_carlo_calisto): - """Tests the simulate_convergence method of the MonteCarlo class. - - Parameters - ---------- - monte_carlo_calisto : MonteCarlo - The MonteCarlo object, this is a pytest fixture. 
- """ - try: - ci_history = monte_carlo_calisto.simulate_convergence( - target_attribute="apogee", - target_confidence=0.95, - tolerance=5.0, - max_simulations=20, - batch_size=5, - parallel=False, - ) - - assert isinstance(ci_history, list) - assert all(isinstance(width, float) for width in ci_history) - assert len(ci_history) >= 1 - assert monte_carlo_calisto.num_of_loaded_sims <= 20 - finally: - _post_test_file_cleanup() + _post_test_file_cleanup() \ No newline at end of file From 16dd43b266e340640a3f163cb20678a8365a648e Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 3 Apr 2026 20:54:31 -0300 Subject: [PATCH 40/44] DEV: Update for hotfix --- CHANGELOG.md | 2 +- docs/conf.py | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 00f9ef5a3..b1e492826 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,7 +46,7 @@ Attention: The newest changes should be on top --> ### Fixed -- BUG: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) +- HOTFIX: Migrate Forecasts to UCAR THREDDS [#943](https://github.com/RocketPy-Team/RocketPy/pull/943) ## [v1.12.0] - 2026-03-08 diff --git a/docs/conf.py b/docs/conf.py index e535082e7..1bbdd7058 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -27,7 +27,7 @@ author = "RocketPy Team" # The full version, including alpha/beta/rc tags -release = "1.12.0" +release = "1.12.1" # -- General configuration --------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index b9433c6d3..4f1ecced4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "rocketpy" -version = "1.12.0" +version = "1.12.1" description="Advanced 6-DOF trajectory simulation for High-Power Rocketry." 
dynamic = ["dependencies"] readme = "README.md" From 749e8b3a7dbd1931f78808537abf36dd739d798e Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 3 Apr 2026 22:37:26 -0300 Subject: [PATCH 41/44] TST: add tests --- docs/user/compare_flights.rst | 4 +- rocketpy/environment/fetchers.py | 14 +++-- rocketpy/environment/tools.py | 8 ++- .../simulation/test_monte_carlo.py | 2 +- tests/unit/environment/test_environment.py | 62 ++++++++++++++++++- 5 files changed, 78 insertions(+), 12 deletions(-) diff --git a/docs/user/compare_flights.rst b/docs/user/compare_flights.rst index 70f5fc7bb..d7f8478c8 100644 --- a/docs/user/compare_flights.rst +++ b/docs/user/compare_flights.rst @@ -26,8 +26,8 @@ This is done following the same steps as in the :ref:`firstsimulation` example. .. jupyter-execute:: - after_tomorrow = datetime.now() + timedelta(days=2) - env = Environment(latitude=-23, longitude=-49, date=after_tomorrow) + tomorrow = datetime.now() + timedelta(days=1) + env = Environment(latitude=-23, longitude=-49, date=tomorrow) env.set_atmospheric_model(type="Forecast", file="GFS") cesaroni_motor = SolidMotor( diff --git a/rocketpy/environment/fetchers.py b/rocketpy/environment/fetchers.py index 5cf03add8..de63d53ad 100644 --- a/rocketpy/environment/fetchers.py +++ b/rocketpy/environment/fetchers.py @@ -12,6 +12,8 @@ from rocketpy.tools import exponential_backoff +MAX_RETRY_DELAY_SECONDS = 600 + @exponential_backoff(max_attempts=3, base_delay=1, max_delay=60) def fetch_open_elevation(lat, lon): @@ -122,7 +124,7 @@ def fetch_gfs_file_return_dataset(max_attempts=10, base_delay=2): return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 - time.sleep(base_delay**attempt_count) + time.sleep(min(base_delay**attempt_count, MAX_RETRY_DELAY_SECONDS)) raise RuntimeError("Unable to load latest weather data for GFS through " + file_url) @@ -155,7 +157,7 @@ def fetch_nam_file_return_dataset(max_attempts=10, base_delay=2): return netCDF4.Dataset(file_url) except OSError: 
attempt_count += 1 - time.sleep(base_delay**attempt_count) + time.sleep(min(base_delay**attempt_count, MAX_RETRY_DELAY_SECONDS)) raise RuntimeError("Unable to load latest weather data for NAM through " + file_url) @@ -188,7 +190,7 @@ def fetch_rap_file_return_dataset(max_attempts=10, base_delay=2): return netCDF4.Dataset(file_url) except OSError: attempt_count += 1 - time.sleep(base_delay**attempt_count) + time.sleep(min(base_delay**attempt_count, MAX_RETRY_DELAY_SECONDS)) raise RuntimeError("Unable to load latest weather data for RAP through " + file_url) @@ -241,7 +243,7 @@ def fetch_hiresw_file_return_dataset(max_attempts=10, base_delay=2): return dataset except OSError: attempt_count += 1 - time.sleep(base_delay**attempt_count) + time.sleep(min(base_delay**attempt_count, MAX_RETRY_DELAY_SECONDS)) if dataset is None: raise RuntimeError( @@ -320,7 +322,7 @@ def fetch_gefs_ensemble(): return dataset except OSError: attempt_count += 1 - time.sleep(2**attempt_count) + time.sleep(min(2**attempt_count, MAX_RETRY_DELAY_SECONDS)) if not success: raise RuntimeError( "Unable to load latest weather data for GEFS through " + file @@ -362,6 +364,6 @@ def fetch_cmc_ensemble(): return dataset except OSError: attempt_count += 1 - time.sleep(2**attempt_count) + time.sleep(min(2**attempt_count, MAX_RETRY_DELAY_SECONDS)) if not success: raise RuntimeError("Unable to load latest weather data for CMC through " + file) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 9081ff42a..4ce986d02 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -279,7 +279,9 @@ def _coord_value(source, index): high = mid lon_index = low - # Take care of longitude value equal to maximum longitude in the grid + # Take care of longitude value equal to minimum/maximum longitude in the grid + if lon_index == 0 and math.isclose(_coord_value(lon_list, 0), lon): + lon_index = 1 if lon_index == lon_len and _coord_value(lon_list, lon_index - 1) == lon: 
lon_index -= 1 # Check if longitude value is inside the grid @@ -332,7 +334,9 @@ def _coord_value(source, index): high = mid lat_index = low - # Take care of latitude value equal to maximum latitude in the grid + # Take care of latitude value equal to minimum/maximum latitude in the grid + if lat_index == 0 and math.isclose(_coord_value(lat_list, 0), latitude): + lat_index = 1 if lat_index == lat_len and _coord_value(lat_list, lat_index - 1) == latitude: lat_index -= 1 # Check if latitude value is inside the grid diff --git a/tests/integration/simulation/test_monte_carlo.py b/tests/integration/simulation/test_monte_carlo.py index 968cbadc5..4b1b82392 100644 --- a/tests/integration/simulation/test_monte_carlo.py +++ b/tests/integration/simulation/test_monte_carlo.py @@ -235,4 +235,4 @@ def invalid_data_collector(flight): with pytest.raises(ValueError): monte_carlo_calisto.simulate(number_of_simulations=10, append=False) finally: - _post_test_file_cleanup() \ No newline at end of file + _post_test_file_cleanup() diff --git a/tests/unit/environment/test_environment.py b/tests/unit/environment/test_environment.py index eb196e591..beb6d5ac6 100644 --- a/tests/unit/environment/test_environment.py +++ b/tests/unit/environment/test_environment.py @@ -6,10 +6,24 @@ import pytz from rocketpy import Environment -from rocketpy.environment.tools import geodesic_to_utm, utm_to_geodesic +from rocketpy.environment.tools import ( + find_longitude_index, + geodesic_to_lambert_conformal, + geodesic_to_utm, + utm_to_geodesic, +) from rocketpy.environment.weather_model_mapping import WeatherModelMapping +class DummyLambertProjection: + """Minimal projection metadata container for unit tests.""" + + latitude_of_projection_origin = 40.0 + longitude_of_central_meridian = 263.0 + standard_parallel = np.array([30.0, 60.0]) + earth_radius = 6371229.0 + + @pytest.mark.parametrize( "latitude, longitude", [(-21.960641, -47.482122), (0, 0), (21.960641, 47.482122)] ) @@ -112,6 +126,52 @@ class 
and checks the conversion results from UTM to geodesic assert np.isclose(lon, -106.9750, atol=1e-5) +def test_geodesic_to_lambert_conformal_projection_origin_maps_to_zero(): + """Tests wrapped central meridian maps to coordinate origin in Lambert conformal.""" + projection = DummyLambertProjection() + + x, y = geodesic_to_lambert_conformal( + lat=projection.latitude_of_projection_origin, + lon=projection.longitude_of_central_meridian % 360, + projection_variable=projection, + x_units="m", + ) + + assert np.isclose(x, 0.0, atol=1e-8) + assert np.isclose(y, 0.0, atol=1e-8) + + +def test_geodesic_to_lambert_conformal_km_units_scale_from_meters(): + """Tests Lambert conformal conversion scales outputs from meters to km.""" + projection = DummyLambertProjection() + + x_meters, y_meters = geodesic_to_lambert_conformal( + lat=39.0, + lon=-96.0, + projection_variable=projection, + x_units="m", + ) + x_km, y_km = geodesic_to_lambert_conformal( + lat=39.0, + lon=-96.0, + projection_variable=projection, + x_units="km", + ) + + assert np.isclose(x_km, x_meters / 1000.0, atol=1e-8) + assert np.isclose(y_km, y_meters / 1000.0, atol=1e-8) + + +def test_find_longitude_index_accepts_lower_grid_boundary(): + """Tests longitude equal to first grid value is accepted as in-range.""" + lon_list = [0.0, 0.25, 0.5] + + lon, lon_index = find_longitude_index(0.0, lon_list) + + assert lon == 0.0 + assert lon_index == 1 + + @pytest.mark.parametrize( "latitude, theoretical_radius", [(0, 6378137.0), (90, 6356752.31424518), (-90, 6356752.31424518)], From e89046ba754a76ebfcefb70acde114c3ab269ed6 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 3 Apr 2026 22:45:08 -0300 Subject: [PATCH 42/44] MNT: remove changes from develop again --- .github/agents/rocketpy-reviewer.agent.md | 62 ---- .github/copilot-instructions.md | 301 +++++++++++++----- .../simulation-safety.instructions.md | 41 --- .../instructions/sphinx-docs.instructions.md | 32 -- .../instructions/tests-python.instructions.md | 36 
--- .../monte_carlo_class_usage.ipynb | 22 -- rocketpy/simulation/monte_carlo.py | 67 ---- .../simulation/test_monte_carlo.py | 27 -- 8 files changed, 221 insertions(+), 367 deletions(-) delete mode 100644 .github/agents/rocketpy-reviewer.agent.md delete mode 100644 .github/instructions/simulation-safety.instructions.md delete mode 100644 .github/instructions/sphinx-docs.instructions.md delete mode 100644 .github/instructions/tests-python.instructions.md diff --git a/.github/agents/rocketpy-reviewer.agent.md b/.github/agents/rocketpy-reviewer.agent.md deleted file mode 100644 index be1b64b13..000000000 --- a/.github/agents/rocketpy-reviewer.agent.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -description: "Physics-safe RocketPy code review agent. Use for pull request review, unit consistency checks, coordinate-frame validation, cached-property risk detection, and regression-focused test-gap analysis." -name: "RocketPy Reviewer" -tools: [read, search, execute] -argument-hint: "Review these changes for physics correctness and regression risk: " -user-invocable: true ---- -You are a RocketPy-focused reviewer for physics safety and regression risk. - -## Goals - -- Detect behavioral regressions and numerical/physics risks before merge. -- Validate unit consistency and coordinate/reference-frame correctness. -- Identify stale-cache risks when `@cached_property` interacts with mutable state. -- Check test coverage quality for changed behavior. -- Verify alignment with RocketPy workflow and contributor conventions. - -## Review Priorities - -1. Correctness and safety issues (highest severity). -2. Behavioral regressions and API compatibility. -3. Numerical stability and tolerance correctness. -4. Missing tests or weak assertions. -5. Documentation mismatches affecting users. -6. Workflow violations (test placement, branch/PR conventions, or missing validation evidence). - -## RocketPy-Specific Checks - -- SI units are explicit and consistent. 
-- Orientation conventions are unambiguous (`tail_to_nose`, `nozzle_to_combustion_chamber`, etc.). -- New/changed simulation logic does not silently invalidate cached values. -- Floating-point assertions use `pytest.approx` where needed. -- New fixtures are wired through `tests/conftest.py` when applicable. -- Test type is appropriate for scope (`unit`, `integration`, `acceptance`) and `all_info()`-style tests - are not misclassified. -- New behavior includes at least one regression-oriented test and relevant edge-case checks. -- For docs-affecting changes, references and paths remain valid and build warnings are addressed. -- Tooling recommendations match current repository setup (prefer Makefile plus `pyproject.toml` - settings when docs are outdated). - -## Validation Expectations - -- Prefer focused test runs first, then broader relevant suites. -- Recommend `make format` and `make lint` when style/lint risks are present. -- Recommend `make build-docs` when `.rst` files or API docs are changed. - -## Output Format - -Provide findings first, ordered by severity. -For each finding include: -- Severity: Critical, High, Medium, or Low -- Location: file path and line -- Why it matters: behavioral or physics risk -- Suggested fix: concrete, minimal change - -After findings, include: -- Open questions or assumptions -- Residual risks or testing gaps -- Brief change summary -- Suggested validation commands (only when useful) - -If no findings are identified, state that explicitly and still report residual risks/testing gaps. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 382aa15e0..f5366cb3b 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,80 +1,221 @@ -# RocketPy Workspace Instructions - -## Code Style -- Use snake_case for variables, functions, methods, and modules. Use descriptive names. -- Use PascalCase for classes and UPPER_SNAKE_CASE for constants. 
-- Keep lines at 88 characters and follow PEP 8 unless existing code in the target file differs. -- Run Ruff as the source of truth for formatting/import organization: - - `make format` - - `make lint` -- Use NumPy-style docstrings for public classes, methods, and functions, including units. -- In case of tooling drift between docs and config, prefer current repository tooling in `Makefile` - and `pyproject.toml`. - -## Architecture -- RocketPy is a modular Python library; keep feature logic in the correct package boundary: - - `rocketpy/simulation`: flight simulation and Monte Carlo orchestration. - - `rocketpy/rocket`, `rocketpy/motors`, `rocketpy/environment`: domain models. - - `rocketpy/mathutils`: numerical primitives and interpolation utilities. - - `rocketpy/plots`, `rocketpy/prints`: output and visualization layers. -- Prefer extending existing classes/patterns over introducing new top-level abstractions. -- Preserve public API stability in `rocketpy/__init__.py` exports. - -## Build and Test -- Use Makefile targets for OS-agnostic workflows: - - `make install` - - `make pytest` - - `make pytest-slow` - - `make coverage` - - `make coverage-report` - - `make build-docs` -- Before finishing code changes, run focused tests first, then broader relevant suites. -- When running Python directly in this workspace, prefer `.venv/Scripts/python.exe`. -- Slow tests are explicitly marked with `@pytest.mark.slow` and are run with `make pytest-slow`. -- For docs changes, check `make build-docs` output and resolve warnings/errors when practical. - -## Development Workflow -- Target pull requests to `develop` by default; `master` is the stable branch. -- Use branch names in `type/description` format, such as: - - `bug/` - - `doc/` - - `enh/` - - `mnt/` - - `tst/` -- Prefer rebasing feature branches on top of `develop` to keep history linear. 
-- Keep commit and PR titles explicit and prefixed with project acronyms when possible: - - `BUG`, `DOC`, `ENH`, `MNT`, `TST`, `BLD`, `REL`, `REV`, `STY`, `DEV`. - -## Conventions -- SI units are the default. Document units and coordinate-system references explicitly. -- Position/reference-frame arguments are critical in this codebase. Be explicit about orientation - (for example, `tail_to_nose`, `nozzle_to_combustion_chamber`). -- Include unit tests for new behavior. Follow AAA structure and clear test names. -- Use fixtures from `tests/fixtures`; if adding a new fixture module, update `tests/conftest.py`. -- Use `pytest.approx` for floating-point checks where appropriate. -- Use `@cached_property` for expensive computations when helpful, and be careful with stale-cache - behavior when underlying mutable state changes. -- Keep behavior backward compatible across the public API exported via `rocketpy/__init__.py`. -- Prefer extending existing module patterns over creating new top-level package structure. - -## Testing Taxonomy -- Unit tests are mandatory for new behavior. -- Unit tests in RocketPy can be sociable (real collaborators allowed) but should still be fast and - method-focused. -- Treat tests as integration tests when they are strongly I/O-oriented or broad across many methods, - including `all_info()` convention cases. -- Acceptance tests represent realistic user/flight scenarios and may compare simulation thresholds to - known flight data. 
- -## Documentation Links -- Contributor workflow and setup: `docs/development/setting_up.rst` -- Style and naming details: `docs/development/style_guide.rst` -- Testing philosophy and structure: `docs/development/testing.rst` -- API reference conventions: `docs/reference/index.rst` -- Domain/physics background: `docs/technical/index.rst` - -## Scoped Customizations -- Simulation-specific rules: `.github/instructions/simulation-safety.instructions.md` -- Test-authoring rules: `.github/instructions/tests-python.instructions.md` -- RST/Sphinx documentation rules: `.github/instructions/sphinx-docs.instructions.md` -- Specialized review persona: `.github/agents/rocketpy-reviewer.agent.md` +# GitHub Copilot Instructions for RocketPy + +This file provides instructions for GitHub Copilot when working on the RocketPy codebase. +These guidelines help ensure consistency with the project's coding standards and development practices. + +## Project Overview + +RocketPy is a Python library for 6-DOF rocket trajectory simulation. +It's designed for high-power rocketry applications with focus on accuracy, performance, and ease of use. 
+ +## Coding Standards + +### Naming Conventions +- **Use `snake_case` for all new code** - variables, functions, methods, and modules +- **Use descriptive names** - prefer `angle_of_attack` over `a` or `alpha` +- **Class names use PascalCase** - e.g., `SolidMotor`, `Environment`, `Flight` +- **Constants use UPPER_SNAKE_CASE** - e.g., `DEFAULT_GRAVITY`, `EARTH_RADIUS` + +### Code Style +- Follow **PEP 8** guidelines +- Line length: **88 characters** (Black's default) +- Organize imports with **isort** +- Our official formatter is the **ruff frmat** + +### Documentation +- **All public classes, methods, and functions must have docstrings** +- Use **NumPy style docstrings** +- Include **Parameters**, **Returns**, and **Examples** sections +- Document **units** for physical quantities (e.g., "in meters", "in radians") + +### Testing +- Write **unit tests** for all new features using pytest +- Follow **AAA pattern** (Arrange, Act, Assert) +- Use descriptive test names following: `test_methodname_expectedbehaviour` +- Include test docstrings explaining expected behavior +- Use **parameterization** for testing multiple scenarios +- Create pytest fixtures to avoid code repetition + +## Domain-Specific Guidelines + +### Physical Units and Conventions +- **SI units by default** - meters, kilograms, seconds, radians +- **Document coordinate systems** clearly (e.g., "tail_to_nose", "nozzle_to_combustion_chamber") +- **Position parameters** are critical - always document reference points +- Use **descriptive variable names** for physical quantities + +### Rocket Components +- **Motors**: SolidMotor, HybridMotor and LiquidMotor classes are children classes of the Motor class +- **Aerodynamic Surfaces**: They have Drag curves and lift coefficients +- **Parachutes**: Trigger functions, deployment conditions +- **Environment**: Atmospheric models, weather data, wind profiles + +### Mathematical Operations +- Use **numpy arrays** for vectorized operations (this improves 
performance) +- Prefer **scipy functions** for numerical integration and optimization +- **Handle edge cases** in calculations (division by zero, sqrt of negative numbers) +- **Validate input ranges** for physical parameters +- Monte Carlo simulations: sample from `numpy.random` for random number generation and creates several iterations to assess uncertainty in simulations. + +## File Structure and Organization + +### Source Code Organization + +Reminds that `rocketpy` is a Python package served as a library, and its source code is organized into several modules to facilitate maintainability and clarity. The following structure is recommended: + +``` +rocketpy/ +├── core/ # Core simulation classes +├── motors/ # Motor implementations +├── environment/ # Atmospheric and environmental models +├── plots/ # Plotting and visualization +├── tools/ # Utility functions +└── mathutils/ # Mathematical utilities +``` + +Please refer to popular Python packages like `scipy`, `numpy`, and `matplotlib` for inspiration on module organization. + +### Test Organization +``` +tests/ +├── unit/ # Unit tests +├── integration/ # Integration tests +├── acceptance/ # Acceptance tests +└── fixtures/ # Test fixtures organized by component +``` + +### Documentation Structure +``` +docs/ +├── user/ # User guides and tutorials +├── development/ # Development documentation +├── reference/ # API reference +├── examples/ # Flight examples and notebooks +└── technical/ # Technical documentation +``` + +## Common Patterns and Practices + +### Error Handling +- Use **descriptive error messages** with context +- **Validate inputs** at class initialization and method entry +- Raise **appropriate exception types** (ValueError, TypeError, etc.) 
+- Include **suggestions for fixes** in error messages + +### Performance Considerations +- Use **vectorized operations** where possible +- **Cache expensive computations** when appropriate (we frequently use `cached_property`) +- Keep in mind that RocketPy must be fast! + +### Backward Compatibility +- **Avoid breaking changes** in public APIs +- Use **deprecation warnings** before removing features +- **Document code changes** in docstrings and CHANGELOG + +## AI Assistant Guidelines + +### Code Generation +- **Always include docstrings** for new functions and classes +- **Follow existing patterns** in the codebase +- **Consider edge cases** and error conditions + +### Code Review and Suggestions +- **Check for consistency** with existing code style +- **Verify physical units** and coordinate systems +- **Ensure proper error handling** and input validation +- **Suggest performance improvements** when applicable +- **Recommend additional tests** for new functionality + +### Documentation Assistance +- **Use NumPy docstring format** consistently +- **Include practical examples** in docstrings +- **Document physical meanings** of parameters +- **Cross-reference related functions** and classes + +## Testing Guidelines + +### Unit Tests +- **Test individual methods** in isolation +- **Use fixtures** from the appropriate test fixture modules +- **Mock external dependencies** when necessary +- **Test both happy path and error conditions** + +### Integration Tests +- **Test interactions** between components +- **Verify end-to-end workflows** (Environment → Motor → Rocket → Flight) + +### Test Data +- **Use realistic parameters** for rocket simulations +- **Include edge cases** (very small/large rockets, extreme conditions) +- **Test with different coordinate systems** and orientations + +## Project-Specific Considerations + +### User Experience +- **Provide helpful error messages** with context and suggestions +- **Include examples** in docstrings and documentation +- 
**Support common use cases** with reasonable defaults + +## Examples of Good Practices + +### Function Definition +```python +def calculate_drag_force( + velocity, + air_density, + drag_coefficient, + reference_area +): + """Calculate drag force using the standard drag equation. + + Parameters + ---------- + velocity : float + Velocity magnitude in m/s. + air_density : float + Air density in kg/m³. + drag_coefficient : float + Dimensionless drag coefficient. + reference_area : float + Reference area in m². + + Returns + ------- + float + Drag force in N. + + Examples + -------- + >>> drag_force = calculate_drag_force(100, 1.225, 0.5, 0.01) + >>> print(f"Drag force: {drag_force:.2f} N") + """ + if velocity < 0: + raise ValueError("Velocity must be non-negative") + if air_density <= 0: + raise ValueError("Air density must be positive") + if reference_area <= 0: + raise ValueError("Reference area must be positive") + + return 0.5 * air_density * velocity**2 * drag_coefficient * reference_area +``` + +### Test Example +```python +def test_calculate_drag_force_returns_correct_value(): + """Test drag force calculation with known inputs.""" + # Arrange + velocity = 100.0 # m/s + air_density = 1.225 # kg/m³ + drag_coefficient = 0.5 + reference_area = 0.01 # m² + expected_force = 30.625 # N + + # Act + result = calculate_drag_force(velocity, air_density, drag_coefficient, reference_area) + + # Assert + assert abs(result - expected_force) < 1e-6 +``` + + +Remember: RocketPy prioritizes accuracy, performance, and usability. Always consider the physical meaning of calculations and provide clear, well-documented interfaces for users. 
diff --git a/.github/instructions/simulation-safety.instructions.md b/.github/instructions/simulation-safety.instructions.md deleted file mode 100644 index cc2af5d27..000000000 --- a/.github/instructions/simulation-safety.instructions.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -description: "Use when editing rocketpy/simulation code, including Flight state updates, Monte Carlo orchestration, post-processing, or cached computations. Covers simulation state safety, unit/reference-frame clarity, and regression checks." -name: "Simulation Safety" -applyTo: "rocketpy/simulation/**/*.py" ---- -# Simulation Safety Guidelines - -- Keep simulation logic inside `rocketpy/simulation` and avoid leaking domain behavior that belongs in - `rocketpy/rocket`, `rocketpy/motors`, or `rocketpy/environment`. -- Preserve public API behavior and exported names used by `rocketpy/__init__.py`. -- Prefer extending existing simulation components before creating new abstractions: - - `flight.py`: simulation state, integration flow, and post-processing. - - `monte_carlo.py`: orchestration and statistical execution workflows. - - `flight_data_exporter.py` and `flight_data_importer.py`: persistence and interchange. - - `flight_comparator.py`: comparative analysis outputs. -- Be explicit with physical units and reference frames in new parameters, attributes, and docstrings. -- For position/orientation-sensitive behavior, use explicit conventions (for example - `tail_to_nose`, `nozzle_to_combustion_chamber`) and avoid implicit assumptions. -- Treat state mutation carefully when cached values exist. -- If changes can invalidate `@cached_property` values, either avoid post-computation mutation or - explicitly invalidate affected caches in a controlled, documented way. -- Keep numerical behavior deterministic unless stochastic behavior is intentional and documented. -- For Monte Carlo and stochastic code paths, make randomness controllable and reproducible when tests - rely on it. 
-- Prefer vectorized NumPy operations for hot paths and avoid introducing Python loops in - performance-critical sections without justification. -- Guard against numerical edge cases (zero/near-zero denominators, interpolation limits, and boundary - conditions). -- Do not change default numerical tolerances or integration behavior without documenting motivation and - validating regression impact. -- Add focused regression tests for changed behavior, including edge cases and orientation-dependent - behavior. -- For floating-point expectations, use `pytest.approx` with meaningful tolerances. -- Run focused tests first, then broader relevant tests (`make pytest` and `make pytest-slow` when - applicable). - -See: -- `docs/development/testing.rst` -- `docs/development/style_guide.rst` -- `docs/development/setting_up.rst` -- `docs/technical/index.rst` diff --git a/.github/instructions/sphinx-docs.instructions.md b/.github/instructions/sphinx-docs.instructions.md deleted file mode 100644 index 8c24cac53..000000000 --- a/.github/instructions/sphinx-docs.instructions.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -description: "Use when writing or editing docs/**/*.rst. Covers Sphinx/reStructuredText conventions, cross-references, toctree hygiene, and RocketPy unit/reference-frame documentation requirements." -name: "Sphinx RST Conventions" -applyTo: "docs/**/*.rst" ---- -# Sphinx and RST Guidelines - -- Follow existing heading hierarchy and style in the target document. -- Prefer linking to existing documentation pages instead of duplicating content. -- Use Sphinx cross-references where appropriate (`:class:`, `:func:`, `:mod:`, `:doc:`, `:ref:`). -- Keep API names and module paths consistent with current code exports. -- Document physical units and coordinate/reference-frame conventions explicitly. -- Include concise, practical examples when introducing new user-facing behavior. -- Keep prose clear and technical; avoid marketing language in development/reference docs. 
-- When adding a new page, update the relevant `toctree` so it appears in navigation. -- Use RocketPy docs build workflow: - - `make build-docs` from repository root for normal validation. - - If stale artifacts appear, clean docs build outputs via `cd docs && make clean`, then rebuild. -- Treat new Sphinx warnings/errors as issues to fix or explicitly call out in review notes. -- Keep `docs/index.rst` section structure coherent with user, development, reference, technical, and - examples navigation. -- Do not edit Sphinx-generated scaffolding files unless explicitly requested: - - `docs/Makefile` - - `docs/make.bat` -- For API docs, ensure references remain aligned with exported/public objects and current module paths. - -See: -- `docs/index.rst` -- `docs/development/build_docs.rst` -- `docs/development/style_guide.rst` -- `docs/reference/index.rst` -- `docs/technical/index.rst` diff --git a/.github/instructions/tests-python.instructions.md b/.github/instructions/tests-python.instructions.md deleted file mode 100644 index 1e9626142..000000000 --- a/.github/instructions/tests-python.instructions.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -description: "Use when creating or editing pytest files in tests/. Enforces AAA structure, naming conventions, fixture usage, parameterization, slow-test marking, and numerical assertion practices for RocketPy." -name: "RocketPy Pytest Standards" -applyTo: "tests/**/*.py" ---- -# RocketPy Test Authoring Guidelines - -- Unit tests are mandatory for new behavior. -- Follow AAA structure in each test: Arrange, Act, Assert. -- Use descriptive test names matching project convention: - - `test_methodname` - - `test_methodname_stateundertest` - - `test_methodname_expectedbehaviour` -- Include docstrings that clearly state expected behavior and context. -- Prefer parameterization for scenario matrices instead of duplicated tests. 
-- Classify tests correctly: - - `tests/unit`: fast, method-focused tests (sociable unit tests are acceptable in RocketPy). - - `tests/integration`: broad multi-method/component interactions and strongly I/O-oriented cases. - - `tests/acceptance`: realistic end-user/flight scenarios with threshold-based expectations. -- By RocketPy convention, tests centered on `all_info()` behavior are integration tests. -- Reuse fixtures from `tests/fixtures` whenever possible. -- Keep fixture organization aligned with existing categories under `tests/fixtures` - (environment, flight, motor, rockets, surfaces, units, etc.). -- If you add a new fixture module, update `tests/conftest.py` so fixtures are discoverable. -- Keep tests deterministic: set seeds when randomness is involved and avoid unstable external - dependencies unless integration behavior explicitly requires them. -- Use `pytest.approx` for floating-point comparisons with realistic tolerances. -- Mark expensive tests with `@pytest.mark.slow` and ensure they can run under the project slow-test - workflow. -- Include at least one negative or edge-case assertion for new behaviors. -- When adding a bug fix, include a regression test that fails before the fix and passes after it. - -See: -- `docs/development/testing.rst` -- `docs/development/style_guide.rst` -- `docs/development/setting_up.rst` diff --git a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb index 8181c03ba..2fb46fa86 100644 --- a/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb +++ b/docs/notebooks/monte_carlo_analysis/monte_carlo_class_usage.ipynb @@ -800,28 +800,6 @@ ")" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Alternatively, we can target an attribute using the method `MonteCarlo.simulate_convergence()` such that when the tolerance is met, the flight simulations would terminate early." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "test_dispersion.simulate_convergence(\n", - " target_attribute=\"apogee_time\",\n", - " target_confidence=0.95,\n", - " tolerance=0.5, # in seconds\n", - " max_simulations=1000,\n", - " batch_size=50,\n", - ")" - ] - }, { "attachments": {}, "cell_type": "markdown", diff --git a/rocketpy/simulation/monte_carlo.py b/rocketpy/simulation/monte_carlo.py index 42a566b7b..e10789a7d 100644 --- a/rocketpy/simulation/monte_carlo.py +++ b/rocketpy/simulation/monte_carlo.py @@ -525,73 +525,6 @@ def estimate_confidence_interval( return res.confidence_interval - def simulate_convergence( - self, - target_attribute="apogee_time", - target_confidence=0.95, - tolerance=0.5, - max_simulations=1000, - batch_size=50, - parallel=False, - n_workers=None, - ): - """Run Monte Carlo simulations in batches until the confidence interval - width converges within the specified tolerance or the maximum number of - simulations is reached. - - Parameters - ---------- - target_attribute : str - The target attribute to track its convergence (e.g., "apogee", "apogee_time", etc.). - target_confidence : float, optional - The confidence level for the interval (between 0 and 1). Default is 0.95. - tolerance : float, optional - The desired width of the confidence interval in seconds, meters, or other units. Default is 0.5. - max_simulations : int, optional - The maximum number of simulations to run to avoid infinite loops. Default is 1000. - batch_size : int, optional - The number of simulations to run in each batch. Default is 50. - parallel : bool, optional - Whether to run simulations in parallel. Default is False. - n_workers : int, optional - The number of worker processes to use if running in parallel. Default is None. - - Returns - ------- - confidence_interval_history : list of float - History of confidence interval widths, one value per batch of simulations. 
- The last element corresponds to the width when the simulation stopped for - either meeting the tolerance or reaching the maximum number of simulations. - """ - - self.import_outputs(self.filename.with_suffix(".outputs.txt")) - confidence_interval_history = [] - - while self.num_of_loaded_sims < max_simulations: - total_sims = min(self.num_of_loaded_sims + batch_size, max_simulations) - - self.simulate( - number_of_simulations=total_sims, - append=True, - include_function_data=False, - parallel=parallel, - n_workers=n_workers, - ) - - self.import_outputs(self.filename.with_suffix(".outputs.txt")) - - ci = self.estimate_confidence_interval( - attribute=target_attribute, - confidence_level=target_confidence, - ) - - confidence_interval_history.append(float(ci.high - ci.low)) - - if float(ci.high - ci.low) <= tolerance: - break - - return confidence_interval_history - def __evaluate_flight_inputs(self, sim_idx): """Evaluates the inputs of a single flight simulation. diff --git a/tests/integration/simulation/test_monte_carlo.py b/tests/integration/simulation/test_monte_carlo.py index 98af2431d..4b1b82392 100644 --- a/tests/integration/simulation/test_monte_carlo.py +++ b/tests/integration/simulation/test_monte_carlo.py @@ -236,30 +236,3 @@ def invalid_data_collector(flight): monte_carlo_calisto.simulate(number_of_simulations=10, append=False) finally: _post_test_file_cleanup() - - -@pytest.mark.slow -def test_monte_carlo_simulate_convergence(monte_carlo_calisto): - """Tests the simulate_convergence method of the MonteCarlo class. - - Parameters - ---------- - monte_carlo_calisto : MonteCarlo - The MonteCarlo object, this is a pytest fixture. 
- """ - try: - ci_history = monte_carlo_calisto.simulate_convergence( - target_attribute="apogee", - target_confidence=0.95, - tolerance=5.0, - max_simulations=20, - batch_size=5, - parallel=False, - ) - - assert isinstance(ci_history, list) - assert all(isinstance(width, float) for width in ci_history) - assert len(ci_history) >= 1 - assert monte_carlo_calisto.num_of_loaded_sims <= 20 - finally: - _post_test_file_cleanup() From badd7e2b1e25db328c15d077bc422e0c3aa112f6 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Fri, 3 Apr 2026 23:03:54 -0300 Subject: [PATCH 43/44] MNT: Refactor longitude and latitude index functions --- rocketpy/environment/tools.py | 197 ++++++++++++++++++++++------------ 1 file changed, 127 insertions(+), 70 deletions(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 4ce986d02..559a63550 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -225,72 +225,148 @@ def mask_and_clean_dataset(*args): return data_array -def find_longitude_index(longitude, lon_list): # pylint: disable=too-many-statements - """Finds the index of the given longitude in a list of longitudes. +def _normalize_longitude_value(longitude, lon_start, lon_end): + """Normalize longitude based on grid format [-180, 180] or [0, 360]. Parameters ---------- longitude : float - The longitude to find in the list. - lon_list : list of float - The list of longitudes. + The longitude to normalize. + lon_start : float + The first longitude value in the grid. + lon_end : float + The last longitude value in the grid. Returns ------- - tuple - A tuple containing the adjusted longitude and its index in the list. - - Raises - ------ - ValueError - If the longitude is not within the range covered by the list. + float + The normalized longitude value. 
""" - - def _coord_value(source, index): - return float(source[index]) - - lon_len = len(lon_list) - lon_start = _coord_value(lon_list, 0) - lon_end = _coord_value(lon_list, lon_len - 1) - # Determine if file uses geographic longitudes in [-180, 180] or [0, 360]. # Do not remap projected x coordinates. is_geographic_longitude = abs(lon_start) <= 360 and abs(lon_end) <= 360 if is_geographic_longitude: if lon_start < 0 or lon_end < 0: - lon = longitude if longitude < 180 else -180 + longitude % 180 - else: - lon = longitude % 360 - else: - lon = longitude + return longitude if longitude < 180 else -180 + longitude % 180 + return longitude % 360 + return longitude - is_ascending = lon_start < lon_end - # Binary search to find the insertion index such that index-1 and index - # bracket the requested longitude. +def _binary_search_coordinate_index(target_value, coord_list, is_ascending): + """Find insertion index for target value using binary search. + + Parameters + ---------- + target_value : float + The coordinate value to locate. + coord_list : list of float + The list of coordinate values. + is_ascending : bool + Whether the coordinate list is in ascending order. + + Returns + ------- + int + The insertion index such that coord_list[index-1] and coord_list[index] + bracket the target value. 
+ """ low = 0 - high = lon_len + high = len(coord_list) while low < high: mid = (low + high) // 2 - mid_value = _coord_value(lon_list, mid) - if (mid_value < lon) if is_ascending else (mid_value > lon): + mid_value = float(coord_list[mid]) + if (mid_value < target_value) if is_ascending else (mid_value > target_value): low = mid + 1 else: high = mid - lon_index = low - - # Take care of longitude value equal to minimum/maximum longitude in the grid - if lon_index == 0 and math.isclose(_coord_value(lon_list, 0), lon): - lon_index = 1 - if lon_index == lon_len and _coord_value(lon_list, lon_index - 1) == lon: - lon_index -= 1 - # Check if longitude value is inside the grid - if lon_index in (0, lon_len): + return low + + +def _adjust_boundary_coordinate_index(index, coord_list, coord_value): + """Adjust index for exact matches at grid boundaries. + + Parameters + ---------- + index : int + The current index from binary search. + coord_list : list of float + The list of coordinate values. + coord_value : float + The coordinate value being matched. + + Returns + ------- + int + The adjusted index after boundary handling. + """ + coord_len = len(coord_list) + if index == 0 and math.isclose(float(coord_list[0]), coord_value): + return 1 + if index == coord_len and float(coord_list[coord_len - 1]) == coord_value: + return index - 1 + return index + + +def _validate_coordinate_index_in_range(index, coord_len, coord_start, coord_end, coord_name): + """Validate that coordinate index is within valid interpolation range. + + Parameters + ---------- + index : int + The coordinate index to validate. + coord_len : int + The length of the coordinate list. + coord_start : float + The first coordinate value in the grid. + coord_end : float + The last coordinate value in the grid. + coord_name : str + The name of the coordinate (e.g., "Longitude", "Latitude"). + + Raises + ------ + ValueError + If the index is out of valid range (0 or coord_len). 
+ """ + if index in (0, coord_len): raise ValueError( - f"Longitude {lon} not inside region covered by file, which is " - f"from {lon_start} to {lon_end}." + f"{coord_name} not inside region covered by file, which is " + f"from {coord_start} to {coord_end}." ) + +def find_longitude_index(longitude, lon_list): + """Finds the index of the given longitude in a list of longitudes. + + Parameters + ---------- + longitude : float + The longitude to find in the list. + lon_list : list of float + The list of longitudes. + + Returns + ------- + tuple + A tuple containing the adjusted longitude and its index in the list. + + Raises + ------ + ValueError + If the longitude is not within the range covered by the list. + """ + lon_len = len(lon_list) + lon_start = float(lon_list[0]) + lon_end = float(lon_list[lon_len - 1]) + + lon = _normalize_longitude_value(longitude, lon_start, lon_end) + is_ascending = lon_start < lon_end + + lon_index = _binary_search_coordinate_index(lon, lon_list, is_ascending) + lon_index = _adjust_boundary_coordinate_index(lon_index, lon_list, lon) + + _validate_coordinate_index_in_range(lon_index, lon_len, lon_start, lon_end, "Longitude") + return lon, lon_index @@ -314,37 +390,18 @@ def find_latitude_index(latitude, lat_list): ValueError If the latitude is not within the range covered by the list. 
""" - - def _coord_value(source, index): - return float(source[index]) - lat_len = len(lat_list) - lat_start = _coord_value(lat_list, 0) - lat_end = _coord_value(lat_list, lat_len - 1) + lat_start = float(lat_list[0]) + lat_end = float(lat_list[lat_len - 1]) is_ascending = lat_start < lat_end - low = 0 - high = lat_len - while low < high: - mid = (low + high) // 2 - mid_value = _coord_value(lat_list, mid) - if (mid_value < latitude) if is_ascending else (mid_value > latitude): - low = mid + 1 - else: - high = mid - lat_index = low - - # Take care of latitude value equal to minimum/maximum latitude in the grid - if lat_index == 0 and math.isclose(_coord_value(lat_list, 0), latitude): - lat_index = 1 - if lat_index == lat_len and _coord_value(lat_list, lat_index - 1) == latitude: - lat_index -= 1 - # Check if latitude value is inside the grid - if lat_index in (0, lat_len): - raise ValueError( - f"Latitude {latitude} not inside region covered by file, " - f"which is from {lat_start} to {lat_end}." 
- ) + lat_index = _binary_search_coordinate_index(latitude, lat_list, is_ascending) + lat_index = _adjust_boundary_coordinate_index(lat_index, lat_list, latitude) + + _validate_coordinate_index_in_range( + lat_index, lat_len, lat_start, lat_end, "Latitude" + ) + return latitude, lat_index From 78256b87f6d9c145debd580271653b48242fad93 Mon Sep 17 00:00:00 2001 From: MateusStano Date: Sat, 4 Apr 2026 11:48:40 -0300 Subject: [PATCH 44/44] MNT: ruff --- rocketpy/environment/tools.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/rocketpy/environment/tools.py b/rocketpy/environment/tools.py index 559a63550..4a06ef2c4 100644 --- a/rocketpy/environment/tools.py +++ b/rocketpy/environment/tools.py @@ -307,7 +307,9 @@ def _adjust_boundary_coordinate_index(index, coord_list, coord_value): return index -def _validate_coordinate_index_in_range(index, coord_len, coord_start, coord_end, coord_name): +def _validate_coordinate_index_in_range( + index, coord_len, coord_start, coord_end, coord_name +): """Validate that coordinate index is within valid interpolation range. Parameters @@ -365,7 +367,9 @@ def find_longitude_index(longitude, lon_list): lon_index = _binary_search_coordinate_index(lon, lon_list, is_ascending) lon_index = _adjust_boundary_coordinate_index(lon_index, lon_list, lon) - _validate_coordinate_index_in_range(lon_index, lon_len, lon_start, lon_end, "Longitude") + _validate_coordinate_index_in_range( + lon_index, lon_len, lon_start, lon_end, "Longitude" + ) return lon, lon_index