From de107ac216f91aeeb36d9f3c53d792362c8641bd Mon Sep 17 00:00:00 2001
From: Patryk Gala
Date: Wed, 27 Nov 2024 20:56:25 +0100
Subject: [PATCH] chore: Remove code and deps

---
 .github/workflows/e2e.yml | 172 -
 .github/workflows/integrations.yml | 797 -----
 .github/workflows/unit-in-pull-request.yml | 31 -
 .github/workflows/unit.yml | 65 -
 poetry.lock | 1092 +------
 pyproject.toml | 221 --
 src/neptune/__init__.py | 108 -
 src/neptune/api/__init__.py | 15 -
 src/neptune/api/fetching_series_values.py | 60 -
 src/neptune/api/field_visitor.py | 91 -
 src/neptune/api/models.py | 865 -----
 src/neptune/api/pagination.py | 78 -
 src/neptune/api/proto/__init__.py | 15 -
 src/neptune/api/proto/neptune_pb/__init__.py | 15 -
 .../api/proto/neptune_pb/api/__init__.py | 15 -
 .../proto/neptune_pb/api/model/__init__.py | 15 -
 .../neptune_pb/api/model/attributes_pb2.py | 36 -
 .../neptune_pb/api/model/attributes_pb2.pyi | 119 -
 .../api/model/leaderboard_entries_pb2.py | 47 -
 .../api/model/leaderboard_entries_pb2.pyi | 345 --
 .../neptune_pb/api/model/series_values_pb2.py | 29 -
 .../api/model/series_values_pb2.pyi | 57 -
 src/neptune/api/requests_utils.py | 34 -
 src/neptune/api/searching_entries.py | 248 --
 src/neptune/attributes/__init__.py | 56 -
 src/neptune/attributes/atoms/__init__.py | 27 -
 src/neptune/attributes/atoms/artifact.py | 84 -
 src/neptune/attributes/atoms/atom.py | 22 -
 src/neptune/attributes/atoms/boolean.py | 49 -
 src/neptune/attributes/atoms/copiable_atom.py | 63 -
 src/neptune/attributes/atoms/datetime.py | 54 -
 src/neptune/attributes/atoms/file.py | 58 -
 src/neptune/attributes/atoms/float.py | 65 -
 src/neptune/attributes/atoms/git_ref.py | 22 -
 src/neptune/attributes/atoms/integer.py | 68 -
 src/neptune/attributes/atoms/notebook_ref.py | 23 -
 src/neptune/attributes/atoms/run_state.py | 22 -
 src/neptune/attributes/atoms/string.py | 75 -
 src/neptune/attributes/attribute.py | 66 -
 src/neptune/attributes/constants.py | 72 -
 src/neptune/attributes/file_set.py | 88 -
 src/neptune/attributes/namespace.py | 137 -
 src/neptune/attributes/series/__init__.py | 24 -
 .../attributes/series/fetchable_series.py | 77 -
 src/neptune/attributes/series/file_series.py | 133 -
 src/neptune/attributes/series/float_series.py | 85 -
 src/neptune/attributes/series/series.py | 167 -
 .../attributes/series/string_series.py | 109 -
 src/neptune/attributes/sets/__init__.py | 20 -
 src/neptune/attributes/sets/set.py | 22 -
 src/neptune/attributes/sets/string_set.py | 76 -
 src/neptune/attributes/utils.py | 78 -
 src/neptune/cli/__init__.py | 22 -
 src/neptune/cli/__main__.py | 48 -
 src/neptune/cli/clear.py | 91 -
 src/neptune/cli/collect.py | 142 -
 src/neptune/cli/commands.py | 169 -
 src/neptune/cli/containers.py | 322 --
 src/neptune/cli/path_option.py | 46 -
 src/neptune/cli/status.py | 103 -
 src/neptune/cli/sync.py | 155 -
 src/neptune/cli/utils.py | 144 -
 src/neptune/constants.py | 41 -
 src/neptune/core/__init__.py | 15 -
 src/neptune/core/components/__init__.py | 15 -
 src/neptune/core/components/abstract.py | 77 -
 src/neptune/core/components/metadata_file.py | 76 -
 .../core/components/operation_storage.py | 43 -
 src/neptune/core/components/queue/__init__.py | 15 -
 .../core/components/queue/disk_queue.py | 265 --
 .../components/queue/json_file_splitter.py | 118 -
 src/neptune/core/components/queue/log_file.py | 75 -
 .../core/components/queue/sync_offset_file.py | 64 -
 src/neptune/envs.py | 81 -
 src/neptune/exceptions.py | 1179 -------
 src/neptune/handler.py | 801 -----
 src/neptune/integrations/__init__.py | 15 -
 src/neptune/integrations/aws/__init__.py | 20 -
 .../integrations/detectron2/__init__.py | 20 -
 src/neptune/integrations/fastai/__init__.py | 20 -
 src/neptune/integrations/kedro/__init__.py | 20 -
 src/neptune/integrations/lightgbm/__init__.py | 20 -
 src/neptune/integrations/mosaicml/__init__.py | 22 -
 src/neptune/integrations/optuna/__init__.py | 20 -
 src/neptune/integrations/pandas/__init__.py | 133 -
 src/neptune/integrations/prophet/__init__.py | 20 -
 src/neptune/integrations/python_logger.py | 79 -
 src/neptune/integrations/pytorch/__init__.py | 20 -
 .../pytorch_lightning/__init__.py | 23 -
 src/neptune/integrations/sacred/__init__.py | 20 -
 src/neptune/integrations/sklearn/__init__.py | 20 -
 .../integrations/tensorboard/__init__.py | 20 -
 .../integrations/tensorflow_keras/__init__.py | 20 -
 .../integrations/transformers/__init__.py | 22 -
 src/neptune/integrations/utils.py | 25 -
 src/neptune/integrations/xgboost/__init__.py | 20 -
 src/neptune/internal/__init__.py | 15 -
 src/neptune/internal/artifacts/__init__.py | 24 -
 .../internal/artifacts/drivers/__init__.py | 19 -
 .../internal/artifacts/drivers/local.py | 116 -
 src/neptune/internal/artifacts/drivers/s3.py | 132 -
 src/neptune/internal/artifacts/file_hasher.py | 112 -
 .../artifacts/local_file_hash_storage.py | 68 -
 src/neptune/internal/artifacts/types.py | 113 -
 src/neptune/internal/artifacts/utils.py | 33 -
 src/neptune/internal/backends/__init__.py | 15 -
 src/neptune/internal/backends/api_model.py | 182 --
 src/neptune/internal/backends/factory.py | 41 -
 .../backends/hosted_artifact_operations.py | 300 --
 .../internal/backends/hosted_client.py | 212 --
 .../backends/hosted_file_operations.py | 534 ----
 .../backends/hosted_neptune_backend.py | 1357 --------
 .../internal/backends/neptune_backend.py | 361 ---
 .../internal/backends/neptune_backend_mock.py | 836 -----
 src/neptune/internal/backends/nql.py | 122 -
 .../backends/offline_neptune_backend.py | 200 --
 .../backends/operation_api_name_visitor.py | 126 -
 .../operation_api_object_converter.py | 157 -
 .../backends/operations_preprocessor.py | 371 ---
 .../internal/backends/project_name_lookup.py | 46 -
 .../backends/swagger_client_wrapper.py | 145 -
 src/neptune/internal/backends/utils.py | 483 ---
 src/neptune/internal/backgroud_job_list.py | 55 -
 src/neptune/internal/background_job.py | 47 -
 src/neptune/internal/constants.py | 23 -
 src/neptune/internal/container_structure.py | 127 -
 src/neptune/internal/container_type.py | 43 -
 src/neptune/internal/credentials.py | 70 -
 src/neptune/internal/envs.py | 28 -
 src/neptune/internal/exceptions.py | 430 ---
 src/neptune/internal/extensions.py | 53 -
 src/neptune/internal/hardware/__init__.py | 15 -
 .../internal/hardware/cgroup/__init__.py | 15 -
 .../cgroup/cgroup_filesystem_reader.py | 69 -
 .../hardware/cgroup/cgroup_monitor.py | 76 -
 src/neptune/internal/hardware/constants.py | 19 -
 .../internal/hardware/gauges/__init__.py | 15 -
 src/neptune/internal/hardware/gauges/cpu.py | 53 -
 src/neptune/internal/hardware/gauges/gauge.py | 38 -
 .../internal/hardware/gauges/gauge_factory.py | 60 -
 .../internal/hardware/gauges/gauge_mode.py | 20 -
 src/neptune/internal/hardware/gauges/gpu.py | 54 -
 .../internal/hardware/gauges/memory.py | 55 -
 src/neptune/internal/hardware/gpu/__init__.py | 15 -
 .../internal/hardware/gpu/gpu_monitor.py | 73 -
 .../hardware/hardware_metric_reporting_job.py | 122 -
 .../internal/hardware/metrics/__init__.py | 15 -
 .../internal/hardware/metrics/metric.py | 99 -
 .../hardware/metrics/metrics_container.py | 35 -
 .../hardware/metrics/metrics_factory.py | 92 -
 .../hardware/metrics/reports/__init__.py | 15 -
 .../hardware/metrics/reports/metric_report.py | 21 -
 .../metrics/reports/metric_reporter.py | 51 -
 .../reports/metric_reporter_factory.py | 24 -
 .../hardware/metrics/service/__init__.py | 15 -
 .../metrics/service/metric_service.py | 27 -
 .../metrics/service/metric_service_factory.py | 51 -
 .../internal/hardware/resources/__init__.py | 15 -
 .../resources/gpu_card_indices_provider.py | 49 -
 .../resources/system_resource_info.py | 55 -
 .../resources/system_resource_info_factory.py | 56 -
 .../internal/hardware/system/__init__.py | 15 -
 .../hardware/system/system_monitor.py | 36 -
 src/neptune/internal/id_formats.py | 29 -
 src/neptune/internal/init/__init__.py | 15 -
 src/neptune/internal/init/parameters.py | 34 -
 src/neptune/internal/notebooks/__init__.py | 15 -
 src/neptune/internal/notebooks/comm.py | 55 -
 src/neptune/internal/notebooks/notebooks.py | 52 -
 src/neptune/internal/oauth.py | 136 -
 src/neptune/internal/operation.py | 605 ----
 .../internal/operation_processors/__init__.py | 15 -
 .../async_operation_processor.py | 359 ---
 .../internal/operation_processors/factory.py | 88 -
 .../lazy_operation_processor_wrapper.py | 125 -
 .../offline_operation_processor.py | 86 -
 .../operation_processors/operation_logger.py | 197 --
 .../operation_processor.py | 56 -
 .../read_only_operation_processor.py | 32 -
 .../sync_operation_processor.py | 94 -
 .../internal/operation_processors/utils.py | 75 -
 src/neptune/internal/operation_visitor.py | 153 -
 src/neptune/internal/patches/__init__.py | 27 -
 src/neptune/internal/patches/bravado.py | 80 -
 .../internal/signals_processing/__init__.py | 15 -
 .../signals_processing/background_job.py | 79 -
 .../internal/signals_processing/signals.py | 64 -
 .../signals_processing/signals_processor.py | 127 -
 .../internal/signals_processing/utils.py | 53 -
 src/neptune/internal/state.py | 25 -
 src/neptune/internal/storage/__init__.py | 38 -
 src/neptune/internal/storage/datastream.py | 90 -
 src/neptune/internal/storage/storage_utils.py | 269 --
 src/neptune/internal/streams/__init__.py | 15 -
 .../streams/std_capture_background_job.py | 72 -
 .../streams/std_stream_capture_logger.py | 93 -
 src/neptune/internal/threading/__init__.py | 15 -
 src/neptune/internal/threading/daemon.py | 147 -
 src/neptune/internal/types/__init__.py | 15 -
 src/neptune/internal/types/file_types.py | 168 -
 src/neptune/internal/types/stringify_value.py | 82 -
 src/neptune/internal/types/utils.py | 25 -
 src/neptune/internal/utils/__init__.py | 205 --
 .../internal/utils/dependency_tracking.py | 85 -
 src/neptune/internal/utils/deprecation.py | 63 -
 .../internal/utils/disk_utilization.py | 173 -
 .../utils/generic_attribute_mapper.py | 67 -
 src/neptune/internal/utils/git.py | 217 --
 src/neptune/internal/utils/git_info.py | 70 -
 src/neptune/internal/utils/hashing.py | 26 -
 src/neptune/internal/utils/images.py | 349 --
 src/neptune/internal/utils/iso_dates.py | 36 -
 src/neptune/internal/utils/iteration.py | 43 -
 src/neptune/internal/utils/limits.py | 50 -
 src/neptune/internal/utils/logger.py | 95 -
 src/neptune/internal/utils/paths.py | 34 -
 src/neptune/internal/utils/patterns.py | 18 -
 .../internal/utils/ping_background_job.py | 72 -
 src/neptune/internal/utils/process_killer.py | 72 -
 .../internal/utils/requirement_check.py | 37 -
 src/neptune/internal/utils/run_state.py | 50 -
 src/neptune/internal/utils/runningmode.py | 36 -
 src/neptune/internal/utils/s3.py | 39 -
 src/neptune/internal/utils/source_code.py | 63 -
 src/neptune/internal/utils/traceback_job.py | 66 -
 .../utils/uncaught_exception_handler.py | 79 -
 src/neptune/internal/utils/utils.py | 270 --
 .../internal/value_to_attribute_visitor.py | 109 -
 src/neptune/internal/warnings.py | 108 -
 src/neptune/internal/websockets/__init__.py | 15 -
 .../websockets/reconnecting_websocket.py | 114 -
 .../websockets/websocket_client_adapter.py | 89 -
 .../websocket_signals_background_job.py | 138 -
 .../internal/websockets/websockets_factory.py | 38 -
 src/neptune/management/__init__.py | 152 -
 src/neptune/management/exceptions.py | 212 --
 src/neptune/management/internal/__init__.py | 15 -
 src/neptune/management/internal/api.py | 1099 -------
 src/neptune/management/internal/dto.py | 91 -
 src/neptune/management/internal/types.py | 46 -
 src/neptune/management/internal/utils.py | 63 -
 src/neptune/objects/__init__.py | 28 -
 src/neptune/objects/abstract.py | 62 -
 src/neptune/objects/model.py | 357 ---
 src/neptune/objects/model_version.py | 299 --
 src/neptune/objects/neptune_object.py | 695 ----
 src/neptune/objects/project.py | 422 ---
 src/neptune/objects/run.py | 593 ----
 src/neptune/objects/structure_version.py | 50 -
 src/neptune/objects/utils.py | 142 -
 src/neptune/table.py | 172 -
 src/neptune/types/__init__.py | 49 -
 src/neptune/types/atoms/__init__.py | 25 -
 src/neptune/types/atoms/artifact.py | 56 -
 src/neptune/types/atoms/atom.py | 35 -
 src/neptune/types/atoms/boolean.py | 48 -
 src/neptune/types/atoms/datetime.py | 50 -
 src/neptune/types/atoms/file.py | 325 --
 src/neptune/types/atoms/float.py | 48 -
 src/neptune/types/atoms/git_ref.py | 64 -
 src/neptune/types/atoms/integer.py | 48 -
 src/neptune/types/atoms/string.py | 53 -
 src/neptune/types/file_set.py | 53 -
 src/neptune/types/mode.py | 29 -
 src/neptune/types/model_version_stage.py | 25 -
 src/neptune/types/namespace.py | 56 -
 src/neptune/types/series/__init__.py | 25 -
 src/neptune/types/series/file_series.py | 84 -
 src/neptune/types/series/float_series.py | 131 -
 src/neptune/types/series/series.py | 53 -
 src/neptune/types/series/series_value.py | 54 -
 src/neptune/types/series/string_series.py | 93 -
 src/neptune/types/sets/__init__.py | 18 -
 src/neptune/types/sets/set.py | 35 -
 src/neptune/types/sets/string_set.py | 40 -
 src/neptune/types/type_casting.py | 118 -
 src/neptune/types/value.py | 33 -
 src/neptune/types/value_copy.py | 51 -
 src/neptune/types/value_visitor.py | 110 -
 src/neptune/typing.py | 87 -
 src/neptune/utils.py | 138 -
 src/neptune/vendor/__init__.py | 0
 src/neptune/vendor/lib_programname.py | 177 --
 src/neptune/vendor/pynvml.py | 2312 --------
 src/neptune/version.py | 49 -
 tests/__init__.py | 0
 tests/e2e/__init__.py | 0
 tests/e2e/base.py | 93 -
 tests/e2e/conftest.py | 147 -
 tests/e2e/exceptions.py | 26 -
 tests/e2e/integrations/__init__.py | 0
 tests/e2e/integrations/test_huggingface.py | 585 ----
 tests/e2e/integrations/test_lightning.py | 168 -
 tests/e2e/integrations/test_mosaicml.py | 104 -
 tests/e2e/integrations/test_zenml.py | 128 -
 tests/e2e/management/__init__.py | 0
 tests/e2e/management/test_management.py | 550 ----
 tests/e2e/plot_utils.py | 133 -
 tests/e2e/pytest.ini | 9 -
 tests/e2e/standard/__init__.py | 0
 tests/e2e/standard/test_artifacts.py | 233 --
 tests/e2e/standard/test_base.py | 177 --
 tests/e2e/standard/test_cli.py | 276 --
 tests/e2e/standard/test_copy.py | 61 -
 tests/e2e/standard/test_fetch_tables.py | 449 ---
 tests/e2e/standard/test_files.py | 559 ----
 tests/e2e/standard/test_init.py | 230 --
 tests/e2e/standard/test_multiple.py | 129 -
 tests/e2e/standard/test_multiprocessing.py | 41 -
 tests/e2e/standard/test_series.py | 164 -
 tests/e2e/standard/test_stage_transitions.py | 82 -
 tests/e2e/utils.py | 235 --
 tests/unit/__init__.py | 15 -
 .../dir_to_link/file_in_linked_dir.txt | 1 -
 .../file_to_link.txt | 1 -
 .../files_to_track/file1.txt | 1 -
 .../files_to_track/sub_dir/file_in_subdir.txt | 1 -
 tests/unit/neptune/__init__.py | 0
 tests/unit/neptune/backend_test_mixin.py | 55 -
 tests/unit/neptune/management/__init__.py | 15 -
 .../neptune/management/internal/__init__.py | 15 -
 .../neptune/management/internal/test_api.py | 131 -
 .../neptune/management/internal/test_utils.py | 42 -
 tests/unit/neptune/new/__init__.py | 0
 tests/unit/neptune/new/api/__init__.py | 15 -
 .../new/api/test_fetching_series_values.py | 160 -
 tests/unit/neptune/new/api/test_models.py | 2808 -----------------
 tests/unit/neptune/new/api/test_pagination.py | 152 -
 .../neptune/new/api/test_requests_utils.py | 58 -
 .../neptune/new/api/test_searching_entries.py | 284 --
 tests/unit/neptune/new/attributes/__init__.py | 15 -
 .../neptune/new/attributes/atoms/__init__.py | 15 -
 .../new/attributes/atoms/test_artifact.py | 187 --
 .../attributes/atoms/test_artifact_hash.py | 59 -
 .../new/attributes/atoms/test_datetime.py | 68 -
 .../neptune/new/attributes/atoms/test_file.py | 212 --
 .../new/attributes/atoms/test_float.py | 84 -
 .../new/attributes/atoms/test_string.py | 54 -
 .../neptune/new/attributes/series/__init__.py | 15 -
 .../new/attributes/series/test_file_series.py | 252 --
 .../attributes/series/test_float_series.py | 87 -
 .../new/attributes/series/test_series.py | 196 --
 .../attributes/series/test_string_series.py | 51 -
 .../neptune/new/attributes/sets/__init__.py | 15 -
 .../new/attributes/sets/test_file_set.py | 95 -
 .../new/attributes/sets/test_string_set.py | 156 -
 .../new/attributes/test_attribute_base.py | 50 -
 .../new/attributes/test_attribute_utils.py | 32 -
 tests/unit/neptune/new/cli/__init__.py | 15 -
 tests/unit/neptune/new/cli/test_clear.py | 245 --
 tests/unit/neptune/new/cli/test_status.py | 199 --
 tests/unit/neptune/new/cli/test_sync.py | 428 ---
 tests/unit/neptune/new/cli/test_utils.py | 68 -
 tests/unit/neptune/new/cli/utils.py | 127 -
 tests/unit/neptune/new/client/__init__.py | 15 -
 .../client/abstract_experiment_test_mixin.py | 160 -
 .../new/client/abstract_tables_test.py | 230 --
 tests/unit/neptune/new/client/test_model.py | 120 -
 .../neptune/new/client/test_model_tables.py | 36 -
 .../neptune/new/client/test_model_version.py | 169 -
 .../new/client/test_model_version_tables.py | 44 -
 tests/unit/neptune/new/client/test_project.py | 165 -
 tests/unit/neptune/new/client/test_run.py | 306 --
 .../neptune/new/client/test_run_tables.py | 81 -
 tests/unit/neptune/new/core/__init__.py | 15 -
 .../neptune/new/core/components/__init__.py | 15 -
 .../new/core/components/queue/__init__.py | 15 -
 .../core/components/queue/test_disk_queue.py | 255 --
 .../queue/test_json_file_splitter.py | 176 --
 .../new/core/components/test_metadata_file.py | 119 -
 tests/unit/neptune/new/internal/__init__.py | 15 -
 .../new/internal/artifacts/__init__.py | 15 -
 .../internal/artifacts/drivers/__init__.py | 15 -
 .../internal/artifacts/drivers/test_local.py | 209 --
 .../new/internal/artifacts/drivers/test_s3.py | 134 -
 .../internal/artifacts/test_file_hasher.py | 134 -
 .../artifacts/test_local_file_hash_storage.py | 73 -
 .../new/internal/artifacts/test_serializer.py | 42 -
 .../new/internal/artifacts/test_types.py | 99 -
 .../neptune/new/internal/artifacts/utils.py | 29 -
 .../neptune/new/internal/backends/__init__.py | 15 -
 .../test_hosted_artifact_operations.py | 196 --
 .../internal/backends/test_hosted_client.py | 568 ----
 .../backends/test_hosted_file_operations.py | 510 ---
 .../backends/test_hosted_neptune_backend.py | 714 -----
 .../backends/test_neptune_backend_mock.py | 448 ---
 .../neptune/new/internal/backends/test_nql.py | 163 -
 .../backends/test_operations_preprocessor.py | 416 ---
 .../backends/test_swagger_client_wrapper.py | 43 -
 .../new/internal/backends/test_utils.py | 209 --
 .../internal/operation_processors/__init__.py | 15 -
 .../test_async_operation_processor.py | 240 --
 .../test_lazy_operation_processor_wrapper.py | 70 -
 .../test_offline_operation_processor.py | 166 -
 .../test_operation_logger.py | 124 -
 .../test_sync_operation_processor.py | 157 -
 .../internal/signals_processing/__init__.py | 15 -
 .../test_signals_processor.py | 546 ----
 .../new/internal/test_container_structure.py | 162 -
 .../neptune/new/internal/test_credentials.py | 62 -
 .../neptune/new/internal/test_operations.py | 154 -
 .../unit/neptune/new/internal/test_streams.py | 88 -
 .../neptune/new/internal/utils/__init__.py | 15 -
 .../utils/test_dependency_tracking.py | 54 -
 .../new/internal/utils/test_deprecation.py | 98 -
 .../internal/utils/test_disk_utilization.py | 205 --
 .../neptune/new/internal/utils/test_git.py | 265 --
 .../new/internal/utils/test_hashing.py | 48 -
 .../neptune/new/internal/utils/test_images.py | 327 --
 .../new/internal/utils/test_iso_dates.py | 55 -
 .../new/internal/utils/test_iteration.py | 43 -
 .../internal/utils/test_requirement_check.py | 76 -
 .../neptune/new/internal/utils/test_utils.py | 49 -
 tests/unit/neptune/new/test_experiment.py | 306 --
 tests/unit/neptune/new/test_handler.py | 797 -----
 tests/unit/neptune/new/test_imports.py | 119 -
 tests/unit/neptune/new/test_libprogramname.py | 56 -
 tests/unit/neptune/new/test_log_handler.py | 139 -
 tests/unit/neptune/new/test_logging.py | 79 -
 .../neptune/new/test_stringify_unsupported.py | 556 ----
 tests/unit/neptune/new/types/__init__.py | 15 -
 .../unit/neptune/new/types/atoms/__init__.py | 15 -
 .../unit/neptune/new/types/atoms/test_file.py | 209 --
 .../neptune/new/types/atoms/test_git_ref.py | 28 -
 .../neptune/new/types/test_file_casting.py | 74 -
 tests/unit/neptune/new/utils/__init__.py | 20 -
 .../new/utils/api_experiments_factory.py | 83 -
 tests/unit/neptune/new/utils/file_helpers.py | 39 -
 tests/unit/neptune/new/utils/logging.py | 6 -
 tests/unit/neptune/new/websockets/__init__.py | 15 -
 .../test_websockets_signals_background_job.py | 57 -
 431 files changed, 13 insertions(+), 57840 deletions(-)
 delete mode 100644 .github/workflows/e2e.yml
 delete mode 100644 .github/workflows/integrations.yml
 delete mode 100644 .github/workflows/unit-in-pull-request.yml
 delete mode 100644 .github/workflows/unit.yml
 delete mode 100644 src/neptune/api/__init__.py
 delete mode 100644 src/neptune/api/fetching_series_values.py
 delete mode 100644 src/neptune/api/field_visitor.py
 delete mode 100644 src/neptune/api/models.py
 delete mode 100644 src/neptune/api/pagination.py
 delete mode 100644 src/neptune/api/proto/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.pyi
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.pyi
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.pyi
 delete mode 100644 src/neptune/api/requests_utils.py
 delete mode 100644 src/neptune/api/searching_entries.py
 delete mode 100644 src/neptune/attributes/__init__.py
 delete mode 100644 src/neptune/attributes/atoms/__init__.py
 delete mode 100644 src/neptune/attributes/atoms/artifact.py
 delete mode 100644 src/neptune/attributes/atoms/atom.py
 delete mode 100644 src/neptune/attributes/atoms/boolean.py
 delete mode 100644 src/neptune/attributes/atoms/copiable_atom.py
 delete mode 100644 src/neptune/attributes/atoms/datetime.py
 delete mode 100644 src/neptune/attributes/atoms/file.py
 delete mode 100644 src/neptune/attributes/atoms/float.py
 delete mode 100644 src/neptune/attributes/atoms/git_ref.py
 delete mode 100644 src/neptune/attributes/atoms/integer.py
 delete mode 100644 src/neptune/attributes/atoms/notebook_ref.py
 delete mode 100644 src/neptune/attributes/atoms/run_state.py
 delete mode 100644 src/neptune/attributes/atoms/string.py
 delete mode 100644 src/neptune/attributes/attribute.py
 delete mode 100644 src/neptune/attributes/constants.py
 delete mode 100644 src/neptune/attributes/file_set.py
 delete mode 100644 src/neptune/attributes/namespace.py
 delete mode 100644 src/neptune/attributes/series/__init__.py
 delete mode 100644 src/neptune/attributes/series/fetchable_series.py
 delete mode 100644 src/neptune/attributes/series/file_series.py
 delete mode 100644 src/neptune/attributes/series/float_series.py
 delete mode 100644 src/neptune/attributes/series/series.py
 delete mode 100644 src/neptune/attributes/series/string_series.py
 delete mode 100644 src/neptune/attributes/sets/__init__.py
 delete mode 100644 src/neptune/attributes/sets/set.py
 delete mode 100644 src/neptune/attributes/sets/string_set.py
 delete mode 100644 src/neptune/attributes/utils.py
 delete mode 100644 src/neptune/cli/__init__.py
 delete mode 100644 src/neptune/cli/__main__.py
 delete mode 100644 src/neptune/cli/clear.py
 delete mode 100644 src/neptune/cli/collect.py
 delete mode 100644 src/neptune/cli/commands.py
 delete mode 100644 src/neptune/cli/containers.py
 delete mode 100644 src/neptune/cli/path_option.py
 delete mode 100644 src/neptune/cli/status.py
 delete mode 100644 src/neptune/cli/sync.py
 delete mode 100644 src/neptune/cli/utils.py
 delete mode 100644 src/neptune/constants.py
 delete mode 100644 src/neptune/core/__init__.py
 delete mode 100644 src/neptune/core/components/__init__.py
 delete mode 100644 src/neptune/core/components/abstract.py
 delete mode 100644 src/neptune/core/components/metadata_file.py
 delete mode 100644 src/neptune/core/components/operation_storage.py
 delete mode 100644 src/neptune/core/components/queue/__init__.py
 delete mode 100644 src/neptune/core/components/queue/disk_queue.py
 delete mode 100644 src/neptune/core/components/queue/json_file_splitter.py
 delete mode 100644 src/neptune/core/components/queue/log_file.py
 delete mode 100644 src/neptune/core/components/queue/sync_offset_file.py
 delete mode 100644 src/neptune/envs.py
 delete mode 100644 src/neptune/exceptions.py
 delete mode 100644 src/neptune/handler.py
 delete mode 100644 src/neptune/integrations/__init__.py
 delete mode 100644 src/neptune/integrations/aws/__init__.py
 delete mode 100644 src/neptune/integrations/detectron2/__init__.py
 delete mode 100644 src/neptune/integrations/fastai/__init__.py
 delete mode 100644 src/neptune/integrations/kedro/__init__.py
 delete mode 100644 src/neptune/integrations/lightgbm/__init__.py
 delete mode 100644 src/neptune/integrations/mosaicml/__init__.py
 delete mode 100644 src/neptune/integrations/optuna/__init__.py
 delete mode 100644 src/neptune/integrations/pandas/__init__.py
 delete mode 100644 src/neptune/integrations/prophet/__init__.py
 delete mode 100644 src/neptune/integrations/python_logger.py
 delete mode 100644 src/neptune/integrations/pytorch/__init__.py
 delete mode 100644 src/neptune/integrations/pytorch_lightning/__init__.py
 delete mode 100644 src/neptune/integrations/sacred/__init__.py
 delete mode 100644 src/neptune/integrations/sklearn/__init__.py
 delete mode 100644 src/neptune/integrations/tensorboard/__init__.py
 delete mode 100644 src/neptune/integrations/tensorflow_keras/__init__.py
 delete mode 100644 src/neptune/integrations/transformers/__init__.py
 delete mode 100644 src/neptune/integrations/utils.py
 delete mode 100644 src/neptune/integrations/xgboost/__init__.py
 delete mode 100644 src/neptune/internal/__init__.py
 delete mode 100644 src/neptune/internal/artifacts/__init__.py
 delete mode 100644 src/neptune/internal/artifacts/drivers/__init__.py
 delete mode 100644 src/neptune/internal/artifacts/drivers/local.py
 delete mode 100644 src/neptune/internal/artifacts/drivers/s3.py
 delete mode 100644 src/neptune/internal/artifacts/file_hasher.py
 delete mode 100644 src/neptune/internal/artifacts/local_file_hash_storage.py
 delete mode 100644 src/neptune/internal/artifacts/types.py
 delete mode 100644 src/neptune/internal/artifacts/utils.py
 delete mode 100644 src/neptune/internal/backends/__init__.py
 delete mode 100644 src/neptune/internal/backends/api_model.py
 delete mode 100644 src/neptune/internal/backends/factory.py
 delete mode 100644 src/neptune/internal/backends/hosted_artifact_operations.py
 delete mode 100644 src/neptune/internal/backends/hosted_client.py
 delete mode 100644 src/neptune/internal/backends/hosted_file_operations.py
 delete mode 100644 src/neptune/internal/backends/hosted_neptune_backend.py
 delete mode 100644 src/neptune/internal/backends/neptune_backend.py
 delete mode 100644 src/neptune/internal/backends/neptune_backend_mock.py
 delete mode 100644 src/neptune/internal/backends/nql.py
 delete mode 100644 src/neptune/internal/backends/offline_neptune_backend.py
 delete mode 100644 src/neptune/internal/backends/operation_api_name_visitor.py
 delete mode 100644 src/neptune/internal/backends/operation_api_object_converter.py
 delete mode 100644 src/neptune/internal/backends/operations_preprocessor.py
 delete mode 100644 src/neptune/internal/backends/project_name_lookup.py
 delete mode 100644 src/neptune/internal/backends/swagger_client_wrapper.py
 delete mode 100644 src/neptune/internal/backends/utils.py
 delete mode 100644 src/neptune/internal/backgroud_job_list.py
 delete mode 100644 src/neptune/internal/background_job.py
 delete mode 100644 src/neptune/internal/constants.py
 delete mode 100644 src/neptune/internal/container_structure.py
 delete mode 100644 src/neptune/internal/container_type.py
 delete mode 100644 src/neptune/internal/credentials.py
 delete mode 100644 src/neptune/internal/envs.py
 delete mode 100644 src/neptune/internal/exceptions.py
 delete mode 100644 src/neptune/internal/extensions.py
 delete mode 100644 src/neptune/internal/hardware/__init__.py
 delete mode 100644 src/neptune/internal/hardware/cgroup/__init__.py
 delete mode 100644 src/neptune/internal/hardware/cgroup/cgroup_filesystem_reader.py
 delete mode 100644 src/neptune/internal/hardware/cgroup/cgroup_monitor.py
 delete mode 100644 src/neptune/internal/hardware/constants.py
 delete mode 100644 src/neptune/internal/hardware/gauges/__init__.py
 delete mode 100644 src/neptune/internal/hardware/gauges/cpu.py
 delete mode 100644 src/neptune/internal/hardware/gauges/gauge.py
 delete mode 100644 src/neptune/internal/hardware/gauges/gauge_factory.py
 delete mode 100644 src/neptune/internal/hardware/gauges/gauge_mode.py
 delete mode 100644 src/neptune/internal/hardware/gauges/gpu.py
 delete mode 100644 src/neptune/internal/hardware/gauges/memory.py
 delete mode 100644 src/neptune/internal/hardware/gpu/__init__.py
 delete mode 100644 src/neptune/internal/hardware/gpu/gpu_monitor.py
 delete mode 100644 src/neptune/internal/hardware/hardware_metric_reporting_job.py
 delete mode 100644 src/neptune/internal/hardware/metrics/__init__.py
 delete mode 100644 src/neptune/internal/hardware/metrics/metric.py
 delete mode 100644 src/neptune/internal/hardware/metrics/metrics_container.py
 delete mode 100644 src/neptune/internal/hardware/metrics/metrics_factory.py
 delete mode 100644 src/neptune/internal/hardware/metrics/reports/__init__.py
 delete mode 100644 src/neptune/internal/hardware/metrics/reports/metric_report.py
 delete mode 100644 src/neptune/internal/hardware/metrics/reports/metric_reporter.py
 delete mode 100644 src/neptune/internal/hardware/metrics/reports/metric_reporter_factory.py
 delete mode 100644 src/neptune/internal/hardware/metrics/service/__init__.py
 delete mode 100644 src/neptune/internal/hardware/metrics/service/metric_service.py
 delete mode 100644 src/neptune/internal/hardware/metrics/service/metric_service_factory.py
 delete mode 100644 src/neptune/internal/hardware/resources/__init__.py
 delete mode 100644 src/neptune/internal/hardware/resources/gpu_card_indices_provider.py
 delete mode 100644 src/neptune/internal/hardware/resources/system_resource_info.py
 delete mode 100644 src/neptune/internal/hardware/resources/system_resource_info_factory.py
 delete mode 100644 src/neptune/internal/hardware/system/__init__.py
 delete mode 100644 src/neptune/internal/hardware/system/system_monitor.py
 delete mode 100644 src/neptune/internal/id_formats.py
 delete mode 100644 src/neptune/internal/init/__init__.py
 delete mode 100644 src/neptune/internal/init/parameters.py
 delete mode 100644 src/neptune/internal/notebooks/__init__.py
 delete mode 100644 src/neptune/internal/notebooks/comm.py
 delete mode 100644 src/neptune/internal/notebooks/notebooks.py
 delete mode 100644 src/neptune/internal/oauth.py
 delete mode 100644 src/neptune/internal/operation.py
 delete mode 100644 src/neptune/internal/operation_processors/__init__.py
 delete mode 100644 src/neptune/internal/operation_processors/async_operation_processor.py
 delete mode 100644 src/neptune/internal/operation_processors/factory.py
 delete mode 100644 src/neptune/internal/operation_processors/lazy_operation_processor_wrapper.py
 delete mode 100644 src/neptune/internal/operation_processors/offline_operation_processor.py
 delete mode 100644 src/neptune/internal/operation_processors/operation_logger.py
 delete mode 100644 src/neptune/internal/operation_processors/operation_processor.py
 delete mode 100644 src/neptune/internal/operation_processors/read_only_operation_processor.py
 delete mode 100644 src/neptune/internal/operation_processors/sync_operation_processor.py
 delete mode 100644 src/neptune/internal/operation_processors/utils.py
 delete mode 100644 src/neptune/internal/operation_visitor.py
 delete mode 100644 src/neptune/internal/patches/__init__.py
 delete mode 100644 src/neptune/internal/patches/bravado.py
 delete mode 100644 src/neptune/internal/signals_processing/__init__.py
 delete mode 100644 src/neptune/internal/signals_processing/background_job.py
 delete mode 100644 src/neptune/internal/signals_processing/signals.py
 delete mode 100644 src/neptune/internal/signals_processing/signals_processor.py
 delete mode 100644 src/neptune/internal/signals_processing/utils.py
 delete mode 100644 src/neptune/internal/state.py
 delete mode 100644 src/neptune/internal/storage/__init__.py
 delete mode 100644 src/neptune/internal/storage/datastream.py
 delete mode 100644 src/neptune/internal/storage/storage_utils.py
 delete mode 100644 src/neptune/internal/streams/__init__.py
 delete mode 100644 src/neptune/internal/streams/std_capture_background_job.py
 delete mode 100644 src/neptune/internal/streams/std_stream_capture_logger.py
 delete mode 100644 src/neptune/internal/threading/__init__.py
 delete mode 100644 src/neptune/internal/threading/daemon.py
 delete mode 100644 src/neptune/internal/types/__init__.py
 delete mode 100644 src/neptune/internal/types/file_types.py
 delete mode 100644 src/neptune/internal/types/stringify_value.py
 delete mode 100644 src/neptune/internal/types/utils.py
 delete mode 100644 src/neptune/internal/utils/__init__.py
 delete mode 100644 src/neptune/internal/utils/dependency_tracking.py
 delete mode 100644 src/neptune/internal/utils/deprecation.py
 delete mode 100644 src/neptune/internal/utils/disk_utilization.py
 delete mode 100644 src/neptune/internal/utils/generic_attribute_mapper.py
 delete mode 100644 src/neptune/internal/utils/git.py
 delete mode 100644 src/neptune/internal/utils/git_info.py
 delete mode 100644 src/neptune/internal/utils/hashing.py
 delete mode 100644 src/neptune/internal/utils/images.py
 delete mode 100644 src/neptune/internal/utils/iso_dates.py
 delete mode 100644 src/neptune/internal/utils/iteration.py
 delete mode 100644 src/neptune/internal/utils/limits.py
 delete mode 100644 src/neptune/internal/utils/logger.py
 delete mode 100644 src/neptune/internal/utils/paths.py
 delete mode 100644 src/neptune/internal/utils/patterns.py
 delete mode 100644 src/neptune/internal/utils/ping_background_job.py
 delete mode 100644 src/neptune/internal/utils/process_killer.py
 delete mode 100644 src/neptune/internal/utils/requirement_check.py
 delete mode 100644 src/neptune/internal/utils/run_state.py
 delete mode 100644 src/neptune/internal/utils/runningmode.py
 delete mode 100644 src/neptune/internal/utils/s3.py
 delete mode 100644 src/neptune/internal/utils/source_code.py
 delete mode 100644 src/neptune/internal/utils/traceback_job.py
 delete mode 100644 src/neptune/internal/utils/uncaught_exception_handler.py
 delete mode 100644 src/neptune/internal/utils/utils.py
 delete mode 100644 src/neptune/internal/value_to_attribute_visitor.py
 delete mode 100644 src/neptune/internal/warnings.py
 delete mode 100644 src/neptune/internal/websockets/__init__.py
 delete mode 100644 src/neptune/internal/websockets/reconnecting_websocket.py
 delete mode 100644 src/neptune/internal/websockets/websocket_client_adapter.py
 delete mode 100644 src/neptune/internal/websockets/websocket_signals_background_job.py
 delete mode 100644 src/neptune/internal/websockets/websockets_factory.py
 delete mode 100644 src/neptune/management/__init__.py
 delete mode 100644 src/neptune/management/exceptions.py
 delete mode 100644 src/neptune/management/internal/__init__.py
 delete mode 100644 src/neptune/management/internal/api.py
 delete mode 100644 src/neptune/management/internal/dto.py
 delete mode 100644 src/neptune/management/internal/types.py
 delete mode 100644 src/neptune/management/internal/utils.py
 delete mode 100644 src/neptune/objects/__init__.py
 delete mode 100644 src/neptune/objects/abstract.py
 delete mode 100644 src/neptune/objects/model.py
 delete mode 100644 src/neptune/objects/model_version.py
 delete mode 100644 src/neptune/objects/neptune_object.py
 delete mode 100644 src/neptune/objects/project.py
 delete mode 100644 src/neptune/objects/run.py
 delete mode 100644 src/neptune/objects/structure_version.py
 delete mode 100644 src/neptune/objects/utils.py
 delete mode 100644 src/neptune/table.py
 delete mode 100644 src/neptune/types/__init__.py
 delete mode 100644 src/neptune/types/atoms/__init__.py
 delete mode 100644 src/neptune/types/atoms/artifact.py
 delete mode 100644 src/neptune/types/atoms/atom.py
 delete mode 100644 src/neptune/types/atoms/boolean.py
 delete mode 100644 src/neptune/types/atoms/datetime.py
 delete mode 100644 src/neptune/types/atoms/file.py
 delete mode 100644 src/neptune/types/atoms/float.py
 delete mode 100644 src/neptune/types/atoms/git_ref.py
 delete mode 100644 src/neptune/types/atoms/integer.py
 delete mode 100644 src/neptune/types/atoms/string.py
 delete mode 100644 src/neptune/types/file_set.py
 delete mode 100644 src/neptune/types/mode.py
 delete mode 100644 src/neptune/types/model_version_stage.py
 delete mode 100644 src/neptune/types/namespace.py
 delete mode 100644 src/neptune/types/series/__init__.py
 delete mode 100644 src/neptune/types/series/file_series.py
 delete mode 100644 src/neptune/types/series/float_series.py
 delete mode 100644 src/neptune/types/series/series.py
 delete mode 100644 src/neptune/types/series/series_value.py
 delete mode 100644 src/neptune/types/series/string_series.py
 delete mode 100644 src/neptune/types/sets/__init__.py
 delete mode 100644 src/neptune/types/sets/set.py
 delete mode 100644 src/neptune/types/sets/string_set.py
 delete mode 100644 src/neptune/types/type_casting.py
 delete mode 100644 src/neptune/types/value.py
 delete mode 100644 src/neptune/types/value_copy.py
 delete mode 100644 src/neptune/types/value_visitor.py
 delete mode 100644 src/neptune/typing.py
 delete mode 100644 src/neptune/utils.py
 delete mode 100644 src/neptune/vendor/__init__.py
 delete mode 100644 src/neptune/vendor/lib_programname.py
 delete mode 100644 src/neptune/vendor/pynvml.py
 delete mode 100644 src/neptune/version.py
 delete mode 100644 tests/__init__.py
 delete mode 100644 tests/e2e/__init__.py
 delete mode 100644 tests/e2e/base.py
 delete mode 100644 tests/e2e/conftest.py
 delete mode 100644 tests/e2e/exceptions.py
 delete mode 100644 tests/e2e/integrations/__init__.py
 delete mode 100644 tests/e2e/integrations/test_huggingface.py
 delete mode 100644 tests/e2e/integrations/test_lightning.py
 delete mode 100644 tests/e2e/integrations/test_mosaicml.py
 delete mode 100644 tests/e2e/integrations/test_zenml.py
 delete mode 100644 tests/e2e/management/__init__.py
 delete mode 100644 tests/e2e/management/test_management.py
 delete mode 100644 tests/e2e/plot_utils.py
 delete mode 100644 tests/e2e/pytest.ini
 delete mode 100644 tests/e2e/standard/__init__.py
 delete mode 100644 tests/e2e/standard/test_artifacts.py
 delete mode 100644 tests/e2e/standard/test_base.py
 delete mode 100644 tests/e2e/standard/test_cli.py
 delete mode 100644 tests/e2e/standard/test_copy.py
 delete mode 100644 tests/e2e/standard/test_fetch_tables.py
 delete mode 100644 tests/e2e/standard/test_files.py
 delete mode 100644 tests/e2e/standard/test_init.py
 delete mode 100644 tests/e2e/standard/test_multiple.py
 delete mode 100644 tests/e2e/standard/test_multiprocessing.py
 delete mode 100644 tests/e2e/standard/test_series.py
 delete mode 100644 tests/e2e/standard/test_stage_transitions.py
 delete mode 100644 tests/e2e/utils.py
 delete mode 100644 tests/unit/__init__.py
 delete mode 100755 tests/unit/data/local_artifact_drivers_data/dir_to_link/file_in_linked_dir.txt
 delete mode 100755 tests/unit/data/local_artifact_drivers_data/file_to_link.txt
 delete mode 100755 tests/unit/data/local_artifact_drivers_data/files_to_track/file1.txt
 delete mode 100755 tests/unit/data/local_artifact_drivers_data/files_to_track/sub_dir/file_in_subdir.txt
 delete mode 100644 tests/unit/neptune/__init__.py
 delete mode 100644 tests/unit/neptune/backend_test_mixin.py
 delete mode 100644 tests/unit/neptune/management/__init__.py
 delete mode 100644 tests/unit/neptune/management/internal/__init__.py
 delete mode 100644 tests/unit/neptune/management/internal/test_api.py
 delete mode 100644 tests/unit/neptune/management/internal/test_utils.py
 delete mode 100644 tests/unit/neptune/new/__init__.py
 delete mode 100644 tests/unit/neptune/new/api/__init__.py
 delete mode 100644 tests/unit/neptune/new/api/test_fetching_series_values.py
 delete mode 100644 tests/unit/neptune/new/api/test_models.py
 delete mode 100644 tests/unit/neptune/new/api/test_pagination.py
 delete mode 100644 tests/unit/neptune/new/api/test_requests_utils.py
 delete mode 100644 tests/unit/neptune/new/api/test_searching_entries.py
 delete mode 100644 tests/unit/neptune/new/attributes/__init__.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/__init__.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_artifact.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_artifact_hash.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_datetime.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_file.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_float.py
 delete mode 100644 tests/unit/neptune/new/attributes/atoms/test_string.py
 delete mode 100644 tests/unit/neptune/new/attributes/series/__init__.py
 delete mode 100644 tests/unit/neptune/new/attributes/series/test_file_series.py
 delete mode 100644 tests/unit/neptune/new/attributes/series/test_float_series.py
 delete mode 100644 tests/unit/neptune/new/attributes/series/test_series.py
 delete mode 100644 tests/unit/neptune/new/attributes/series/test_string_series.py
 delete mode 100644 tests/unit/neptune/new/attributes/sets/__init__.py
 delete mode 100644 tests/unit/neptune/new/attributes/sets/test_file_set.py
 delete mode 100644 tests/unit/neptune/new/attributes/sets/test_string_set.py
 delete mode 100644 tests/unit/neptune/new/attributes/test_attribute_base.py
 delete mode 100644 tests/unit/neptune/new/attributes/test_attribute_utils.py
 delete mode 100644 tests/unit/neptune/new/cli/__init__.py
 delete mode 100644 tests/unit/neptune/new/cli/test_clear.py
 delete mode 100644 tests/unit/neptune/new/cli/test_status.py
 delete mode 100644 tests/unit/neptune/new/cli/test_sync.py
 delete mode 100644 tests/unit/neptune/new/cli/test_utils.py
 delete mode 100644 tests/unit/neptune/new/cli/utils.py
 delete mode 100644 tests/unit/neptune/new/client/__init__.py
 delete mode 100644 tests/unit/neptune/new/client/abstract_experiment_test_mixin.py
 delete mode 100644 tests/unit/neptune/new/client/abstract_tables_test.py
 delete mode 100644 tests/unit/neptune/new/client/test_model.py
 delete mode 100644 tests/unit/neptune/new/client/test_model_tables.py
 delete mode 100644 tests/unit/neptune/new/client/test_model_version.py
 delete mode 100644 tests/unit/neptune/new/client/test_model_version_tables.py
 delete mode 100644 tests/unit/neptune/new/client/test_project.py
 delete mode 100644 tests/unit/neptune/new/client/test_run.py
 delete mode 100644 tests/unit/neptune/new/client/test_run_tables.py
 delete mode 100644 tests/unit/neptune/new/core/__init__.py
 delete mode 100644 tests/unit/neptune/new/core/components/__init__.py
 delete mode 100644 tests/unit/neptune/new/core/components/queue/__init__.py
 delete mode 100644 tests/unit/neptune/new/core/components/queue/test_disk_queue.py
 delete mode 100644 tests/unit/neptune/new/core/components/queue/test_json_file_splitter.py
 delete mode 100644 tests/unit/neptune/new/core/components/test_metadata_file.py
 delete mode 100644 tests/unit/neptune/new/internal/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/drivers/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/drivers/test_local.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/drivers/test_s3.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/test_file_hasher.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/test_local_file_hash_storage.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/test_serializer.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/test_types.py
 delete mode 100644 tests/unit/neptune/new/internal/artifacts/utils.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_hosted_artifact_operations.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_hosted_client.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_hosted_file_operations.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_hosted_neptune_backend.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_nql.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_swagger_client_wrapper.py
 delete mode 100644 tests/unit/neptune/new/internal/backends/test_utils.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/test_async_operation_processor.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/test_lazy_operation_processor_wrapper.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/test_offline_operation_processor.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/test_operation_logger.py
 delete mode 100644 tests/unit/neptune/new/internal/operation_processors/test_sync_operation_processor.py
 delete mode 100644 tests/unit/neptune/new/internal/signals_processing/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/signals_processing/test_signals_processor.py
 delete mode 100644 tests/unit/neptune/new/internal/test_container_structure.py
 delete mode 100644 tests/unit/neptune/new/internal/test_credentials.py
 delete mode 100644 tests/unit/neptune/new/internal/test_operations.py
 delete mode 100644 tests/unit/neptune/new/internal/test_streams.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/__init__.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_dependency_tracking.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_deprecation.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_disk_utilization.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_git.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_hashing.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_images.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_iso_dates.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_iteration.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_requirement_check.py
 delete mode 100644 tests/unit/neptune/new/internal/utils/test_utils.py
 delete mode 100644 tests/unit/neptune/new/test_experiment.py
 delete mode 100644 tests/unit/neptune/new/test_handler.py
 delete mode 100644 tests/unit/neptune/new/test_imports.py
 delete mode 100644 tests/unit/neptune/new/test_libprogramname.py
 delete mode 100644 tests/unit/neptune/new/test_log_handler.py
 delete mode 100644 tests/unit/neptune/new/test_logging.py
 delete mode 100644 tests/unit/neptune/new/test_stringify_unsupported.py
 delete mode 100644 tests/unit/neptune/new/types/__init__.py
 delete mode 100644 tests/unit/neptune/new/types/atoms/__init__.py
 delete mode 100644 tests/unit/neptune/new/types/atoms/test_file.py
 delete mode 100644 tests/unit/neptune/new/types/atoms/test_git_ref.py
 delete mode 100644 tests/unit/neptune/new/types/test_file_casting.py
 delete mode 100644 tests/unit/neptune/new/utils/__init__.py
 delete mode 100644 tests/unit/neptune/new/utils/api_experiments_factory.py
 delete mode 100644 tests/unit/neptune/new/utils/file_helpers.py
 delete mode 100644 tests/unit/neptune/new/utils/logging.py
 delete mode 100644 tests/unit/neptune/new/websockets/__init__.py
 delete mode 100644 tests/unit/neptune/new/websockets/test_websockets_signals_background_job.py

diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
deleted file mode 100644
index 80c939ed2..000000000
--- a/.github/workflows/e2e.yml
+++ /dev/null
@@ -1,172 +0,0 @@
-name: e2e
-
-on:
-  workflow_call:
-  workflow_dispatch:
-  schedule:
-    - cron: "0 4 * * *" # Run every day at arbitrary time (4:00 AM UTC)
-  push:
-    branches:
-      - master
-
-env:
-  WORKSPACE_NAME: e2e-tests
-  BUCKET_NAME: ${{ secrets.E2E_BUCKET_NAME }}
-  USER_USERNAME: ${{ secrets.E2E_USER_USERNAME }}
-  ADMIN_USERNAME: ${{ secrets.E2E_ADMIN_USERNAME }}
-  ADMIN_NEPTUNE_API_TOKEN: ${{ secrets.E2E_ADMIN_NEPTUNE_API_TOKEN }}
-  SERVICE_ACCOUNT_NAME: ${{ secrets.E2E_SERVICE_ACCOUNT_NAME }}
-
-jobs:
-  e2e:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.12" ]
-        os: [ ubuntu, windows ]
-        include:
-          - python-version: "3.7.16"
-            os: macos
-          - python-version: "3.12"
-            os: macos
-    runs-on: ${{ matrix.os }}-latest
-    name: 'standard (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        env:
-          NEPTUNE_API_TOKEN: ${{secrets.E2E_SERVICE_ACCOUNT_API_TOKEN}}
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: "not s3 and not integrations"
-          name: "standard"
-          report_job: 'standard (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-
-  e2e_management:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.12" ]
-        os: [ ubuntu, windows ]
-        include:
-          - python-version: "3.7.16"
-            os: macos
-          - python-version: "3.12"
-            os: macos
-    runs-on: ${{ matrix.os }}-latest
-    name: 'management (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        env:
-          NEPTUNE_API_TOKEN: ${{secrets.E2E_NEPTUNE_API_TOKEN}}
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: management
-          name: management
-          report_job: 'management (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-
-  e2e_s3_gcs:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.12" ]
-        os: [ ubuntu, windows ]
-        include:
-          - python-version: "3.7.16"
-            os: macos
-          - python-version: "3.12"
-            os: macos
-    runs-on: ${{ matrix.os }}-latest
-    name: 's3-gcs (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        env:
-          NEPTUNE_API_TOKEN: ${{secrets.E2E_SERVICE_ACCOUNT_API_TOKEN}}
-          AWS_ACCESS_KEY_ID: ${{secrets.E2E_GCS_ACCESS_KEY_ID}}
-          AWS_SECRET_ACCESS_KEY: ${{secrets.E2E_GCS_SECRET_ACCESS_KEY}}
-          S3_ENDPOINT_URL: "https://storage.googleapis.com/"
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: s3
-          name: s3-gcs
-          report_job: 's3-gcs (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-
-  e2e_s3:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.12" ]
-        os: [ ubuntu, windows ]
-        include:
-          - python-version: "3.7.16"
-            os: macos
-          - python-version: "3.12"
-            os: macos
-    runs-on: ${{ matrix.os }}-latest
-    name: 's3 (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        env:
-          NEPTUNE_API_TOKEN: ${{secrets.E2E_SERVICE_ACCOUNT_API_TOKEN}}
-          AWS_ACCESS_KEY_ID: ${{secrets.E2E_AWS_ACCESS_KEY_ID}}
-          AWS_SECRET_ACCESS_KEY: ${{secrets.E2E_AWS_SECRET_ACCESS_KEY}}
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: s3
-          name: s3
-          report_job: 's3 (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-
-  e2e-tests-notify:
-    needs: [ e2e, e2e_management, e2e_s3_gcs, e2e_s3 ]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/workflow-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_REGULAR_SLACK_WEBHOOK }}
diff --git a/.github/workflows/integrations.yml b/.github/workflows/integrations.yml
deleted file mode 100644
index 8fc67403f..000000000
--- a/.github/workflows/integrations.yml
+++ /dev/null
@@ -1,797 +0,0 @@
-name: integrations
-
-on:
-  workflow_call:
-  workflow_dispatch:
-# schedule:
-#   - cron: "0 6 * * *" # Run every day at arbitrary time (6:00 AM UTC)
-
-env:
-  WORKSPACE_NAME: e2e-tests
-  NEPTUNE_API_TOKEN: ${{ secrets.E2E_NEPTUNE_API_TOKEN }}
-  SLACK_WEBHOOK_URL: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-
-jobs:
-  fastai:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.10" ]
-        os: [ ubuntu-latest, windows-latest ]
-        include:
-          - python-version: "3.7.16"
-            os: macos-latest
-          - python-version: "3.10"
-            os: macos-latest
-    runs-on: ${{ matrix.os }}
-    name: 'fastai (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test integration
-        uses: neptune-ai/neptune-fastai/.github/actions/e2e@master
-        with:
-          working_directory: neptune-fastai
-
-  fastai-notify:
-    needs: [fastai]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.fastai.result }}
-
-  kedro:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.9", "3.10" ]
-        os: [ ubuntu-latest, macos-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'kedro (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test integration
-        uses: neptune-ai/kedro-neptune/.github/actions/e2e@main
-        with:
-          working_directory: kedro-neptune
-
-  kedro-notify:
-    needs: [kedro]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.kedro.result }}
-
-  prophet:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.10" ]
-        os: [ ubuntu-latest ]
-        include:
-          - python-version: "3.7.16"
-            os: macos-latest
-          - python-version: "3.10"
-            os: macos-latest
-    runs-on: ${{ matrix.os }}
-    name: 'prophet (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test integration
-        uses: neptune-ai/neptune-prophet/.github/actions/e2e@main
-        with:
-          working_directory: neptune-prophet
-
-  prophet-notify:
-    needs: [prophet]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.prophet.result }}
-
-  keras:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.10" ]
-        os: [ ubuntu-latest, windows-latest ]
-        include:
-          - python-version: "3.7.16"
-            os: macos-latest
-          - python-version: "3.10"
-            os: macos-latest
-    runs-on: ${{ matrix.os }}
-    name: 'keras (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test integration
-        uses: neptune-ai/neptune-tensorflow-keras/.github/actions/e2e@master
-        with:
-          working_directory: neptune-tensorflow-keras
-
-  keras-notify:
-    needs: [keras]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.keras.result }}
-
-  lightning:
-    env:
-      BUCKET_NAME: ${{ secrets.E2E_BUCKET_NAME }}
-      USER_USERNAME: ${{ secrets.E2E_USER_USERNAME }}
-      ADMIN_USERNAME: ${{ secrets.E2E_ADMIN_USERNAME }}
-      ADMIN_NEPTUNE_API_TOKEN: ${{ secrets.E2E_ADMIN_NEPTUNE_API_TOKEN }}
-      SERVICE_ACCOUNT_NAME: ${{ secrets.E2E_SERVICE_ACCOUNT_NAME }}
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.10" ]
-        os: [ ubuntu-latest ]
-        include:
-          - python-version: "3.7.16"
-            os: macos-latest
-          - python-version: "3.10"
-            os: macos-latest
-    runs-on: ${{ matrix.os }}
-    name: 'lightning (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: lightning
-          report_job: 'lightning (${{ matrix.os }} - py${{ matrix.python-version }})'
-          pip_url: pytorch_lightning torch
-
-  lightning-notify:
-    needs: [lightning]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.lightning.result }}
-
-  huggingface:
-    env:
-      BUCKET_NAME: ${{ secrets.E2E_BUCKET_NAME }}
-      USER_USERNAME: ${{ secrets.E2E_USER_USERNAME }}
-      ADMIN_USERNAME: ${{ secrets.E2E_ADMIN_USERNAME }}
-      ADMIN_NEPTUNE_API_TOKEN: ${{ secrets.E2E_ADMIN_NEPTUNE_API_TOKEN }}
-      SERVICE_ACCOUNT_NAME: ${{ secrets.E2E_SERVICE_ACCOUNT_NAME }}
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.8", "3.10" ]
-        os: [ ubuntu-latest ]
-        include:
-          - python-version: "3.8"
-            os: macos-latest
-          - python-version: "3.10"
-            os: macos-latest
-    runs-on: ${{ matrix.os }}
-    name: 'huggingface (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: huggingface
-          report_job: 'huggingface (${{ matrix.os }} - py${{ matrix.python-version }})'
-          pip_url: transformers torch optuna
-
-  huggingface-notify:
-    needs: [huggingface]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.huggingface.result }}
-
-  zenml:
-    env:
-      BUCKET_NAME: ${{ secrets.E2E_BUCKET_NAME }}
-      USER_USERNAME: ${{ secrets.E2E_USER_USERNAME }}
-      ADMIN_USERNAME: ${{ secrets.E2E_ADMIN_USERNAME }}
-      ADMIN_NEPTUNE_API_TOKEN: ${{ secrets.E2E_ADMIN_NEPTUNE_API_TOKEN }}
-      SERVICE_ACCOUNT_NAME: ${{ secrets.E2E_SERVICE_ACCOUNT_NAME }}
-      ZENML_ANALYTICS_OPT_IN: false
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.8", "3.10" ]
-        os: [ ubuntu-latest, macos-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'zenml (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: zenml
-          report_job: 'zenml (${{ matrix.os }} - py${{ matrix.python-version }})'
-          pip_url: zenml scikit-learn
-
-  zenml-notify:
-    needs: [zenml]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.zenml.result }}
-
-  mosaicml:
-    env:
-      BUCKET_NAME: ${{ secrets.E2E_BUCKET_NAME }}
-      USER_USERNAME: ${{ secrets.E2E_USER_USERNAME }}
-      ADMIN_USERNAME: ${{ secrets.E2E_ADMIN_USERNAME }}
-      ADMIN_NEPTUNE_API_TOKEN: ${{ secrets.E2E_ADMIN_NEPTUNE_API_TOKEN }}
-      SERVICE_ACCOUNT_NAME: ${{ secrets.E2E_SERVICE_ACCOUNT_NAME }}
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.9", "3.11" ]
-        os: [ ubuntu-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'mosaicml (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Test
-        uses: ./.github/actions/test-e2e
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          module: mosaicml
-          report_job: 'mosaicml (${{ matrix.os }} - py${{ matrix.python-version }})'
-          pip_url: mosaicml torch torchvision
-
-  mosaicml-notify:
-    needs: [ mosaicml ]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.mosaicml.result }}
-
-  detectron2:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.8", "3.9" ]
-        os: [ ubuntu-latest, macos-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'detectron2 (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test integration
-        uses: neptune-ai/neptune-detectron2/.github/actions/e2e@main
-        with:
-          working_directory: neptune-detectron2
-
-  detectron2-notify:
-    needs: [ detectron2 ]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.detectron2.result }}
-
-  rlang:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ ubuntu-latest, macos-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'R (${{ matrix.os }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          os: ${{ matrix.os }}
-          python-version: "3.10"
-
-      - name: Test integration
-        uses: neptune-ai/neptune-r/.github/actions/e2e@master
-        with:
-          working_directory: rlang
-
-  rlang-notify:
-    needs: [ rlang ]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/job-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }}
-          job-status: ${{ needs.rlang.result }}
-
-  sklearn:
-    env:
-      NEPTUNE_PROJECT: e2e-tests/integrations
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: [ "3.7", "3.9" ]
-        os: [ ubuntu-latest, macos-latest ]
-    runs-on: ${{ matrix.os }}
-    name: 'sklearn (${{ matrix.os }} - py${{ matrix.python-version }})'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Install package
-        uses: ./.github/actions/install-package
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-
-      - name: Test
integration - uses: neptune-ai/neptune-sklearn/.github/actions/e2e@master - with: - working_directory: neptune-sklearn - - sklearn-notify: - needs: [ sklearn ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.sklearn.result }} - - xgboost: - env: - NEPTUNE_PROJECT: e2e-tests/integrations - timeout-minutes: 75 - strategy: - fail-fast: false - matrix: - python-version: [ "3.7", "3.9" ] - os: [ ubuntu-latest, macos-latest, windows-latest ] - runs-on: ${{ matrix.os }} - name: 'xgboost (${{ matrix.os }} - py${{ matrix.python-version }})' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Install package - uses: ./.github/actions/install-package - with: - python-version: ${{ matrix.python-version }} - os: ${{ matrix.os }} - - - name: Test integration - uses: neptune-ai/neptune-xgboost/.github/actions/e2e@master - with: - working_directory: neptune-xgboost - - xgboost-notify: - needs: [ xgboost ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.xgboost.result }} - - sacred: - env: - NEPTUNE_PROJECT: e2e-tests/integrations - timeout-minutes: 75 - strategy: - fail-fast: false - matrix: - python-version: [ "3.7", "3.10" ] - os: [ ubuntu-latest, macos-latest, windows-latest ] - runs-on: ${{ matrix.os }} - name: 'sacred (${{ matrix.os }} - py${{ matrix.python-version }})' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Install package - uses: ./.github/actions/install-package - with: - python-version: ${{ matrix.python-version }} - os: ${{ matrix.os }} - - - name: Test integration - uses: neptune-ai/neptune-sacred/.github/actions/e2e@main - with: - working_directory: neptune-sacred - - sacred-notify: - needs: [ sacred ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.sacred.result }} - - mlflow: - env: - NEPTUNE_PROJECT: e2e-tests/integrations - timeout-minutes: 75 - strategy: - fail-fast: false - matrix: - python-version: [ "3.8", "3.10" ] - os: [ ubuntu-latest, macos-latest, windows-latest ] - runs-on: ${{ matrix.os }} - name: 'mlflow (${{ matrix.os }} - py${{ matrix.python-version }})' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Install package - uses: ./.github/actions/install-package - with: - python-version: 
${{ matrix.python-version }} - os: ${{ matrix.os }} - - - name: Test integration - uses: neptune-ai/neptune-mlflow/.github/actions/e2e@master - with: - working_directory: neptune-mlflow - - mlflow-notify: - needs: [ mlflow ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.mlflow.result }} - - airflow: - env: - NEPTUNE_PROJECT: e2e-tests/integrations - timeout-minutes: 75 - strategy: - fail-fast: false - matrix: - python-version: [ "3.8", "3.10" ] - os: [ ubuntu-latest, macos-latest, windows-latest ] - runs-on: ${{ matrix.os }} - name: 'airflow (${{ matrix.os }} - py${{ matrix.python-version }})' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Install package - uses: ./.github/actions/install-package - with: - python-version: ${{ matrix.python-version }} - os: ${{ matrix.os }} - - - name: Install airflow - uses: neptune-ai/neptune-airflow/.github/actions/airflow-install@main - with: - python-version: ${{ matrix.python-version }} - working_directory: neptune-airflow - - - name: Test integration - uses: neptune-ai/neptune-airflow/.github/actions/e2e@main - with: - working_directory: neptune-airflow - - airflow-notify: - needs: [ airflow ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.airflow.result }} - - fetcher: - timeout-minutes: 75 - strategy: - fail-fast: false - matrix: - python-version: [ "3.7", "3.10" ] - os: [ ubuntu-latest ] - runs-on: ${{ matrix.os }} - name: 'fetcher (${{ matrix.os }} - py${{ matrix.python-version }})' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Install fetcher - uses: neptune-ai/neptune-fetcher/.github/actions/install-package@main - with: - python-version: ${{ matrix.python-version }} - working_directory: neptune-fetcher - - - name: Install package - uses: ./.github/actions/install-package - with: - python-version: ${{ matrix.python-version }} - os: ${{ matrix.os }} - - - name: List dependencies - run: | - pip list - shell: bash - - - name: Test integration - uses: neptune-ai/neptune-fetcher/.github/actions/test-integration@main - with: - working_directory: neptune-fetcher - - fetcher-notify: - needs: [ fetcher ] - runs-on: ubuntu-latest - if: (success() || failure()) && github.ref == 'refs/heads/master' - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: ${{ github.event.client_payload.pull_request.head.ref }} - - - name: Notify - uses: ./.github/actions/job-notify - with: - slack-webhook: ${{ secrets.E2E_INTEGRATIONS_SLACK_WEBHOOK }} - job-status: ${{ needs.fetcher.result }} diff --git a/.github/workflows/unit-in-pull-request.yml b/.github/workflows/unit-in-pull-request.yml deleted 
index eb42476fc..000000000
--- a/.github/workflows/unit-in-pull-request.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: pull-request-unittests
-
-on:
-  workflow_dispatch:
-  push:
-    branches-ignore:
-      - master
-
-jobs:
-  test:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu, windows, macos]
-        python-version: ["3.7"]
-    name: 'test (${{ matrix.os }} - py${{ matrix.python-version }})'
-    runs-on: ${{ matrix.os }}-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Run tests
-        uses: ./.github/actions/test-unit
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          report_job: 'test (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/unit.yml b/.github/workflows/unit.yml
deleted file mode 100644
index 3437a3118..000000000
--- a/.github/workflows/unit.yml
+++ /dev/null
@@ -1,65 +0,0 @@
-name: unit
-
-on:
-  workflow_call:
-  workflow_dispatch:
-  schedule:
-    - cron: "0 4 * * *" # Run every day at arbitrary time (4:00 AM UTC)
-  push:
-    branches:
-      - master
-
-jobs:
-  test:
-    timeout-minutes: 75
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
-        os: [ubuntu, windows]
-        include:
-          - python-version: "3.7.16"
-            os: macos
-          - python-version: "3.8"
-            os: macos
-          - python-version: "3.9"
-            os: macos
-          - python-version: "3.10"
-            os: macos
-          - python-version: "3.11"
-            os: macos
-          - python-version: "3.12"
-            os: macos
-
-    name: 'test (${{ matrix.os }} - py${{ matrix.python-version }})'
-    runs-on: ${{ matrix.os }}-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Run tests
-        uses: ./.github/actions/test-unit
-        with:
-          python-version: ${{ matrix.python-version }}
-          os: ${{ matrix.os }}
-          report_job: 'test (${{ matrix.os }} - py${{ matrix.python-version }})'
-          codecov-token: ${{ secrets.CODECOV_TOKEN }}
-
-  unit-tests-notify:
-    needs: [ test ]
-    runs-on: ubuntu-latest
-    if: (success() || failure()) && github.ref == 'refs/heads/master'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-          ref: ${{ github.event.client_payload.pull_request.head.ref }}
-
-      - name: Notify
-        uses: ./.github/actions/workflow-notify
-        with:
-          slack-webhook: ${{ secrets.E2E_REGULAR_SLACK_WEBHOOK }}
diff --git a/poetry.lock b/poetry.lock
index 501128b50..4a2be4e27 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,535 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
-
-[[package]]
-name = "arrow"
-version = "1.2.3"
-description = "Better dates & times for Python"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"},
-    {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"},
-]
-
-[package.dependencies]
-python-dateutil = ">=2.7.0"
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "attrs"
-version = "23.2.0"
-description = "Classes Without Boilerplate"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
-    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-
-[package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
-
-[[package]]
-name = "bravado"
-version = "11.0.3"
-description = "Library for accessing Swagger-enabled API's"
-optional = false
-python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,!=3.4,!=3.5.0"
-files = [
-    {file = "bravado-11.0.3-py2.py3-none-any.whl", hash = "sha256:8ac8bbb645e49607917a5c07808116c708521f51e80d9c29bc4a168ff4dd22c6"},
-    {file = "bravado-11.0.3.tar.gz", hash = "sha256:1bb6ef75d84140c851fffe6420baaee5037d840070cfe11d60913be6ab8e0530"},
-]
-
-[package.dependencies]
-bravado-core = ">=5.16.1"
-monotonic = "*"
-msgpack = "*"
-python-dateutil = "*"
-pyyaml = "*"
-requests = ">=2.17"
-simplejson = "*"
-six = "*"
-typing-extensions = "*"
-
-[package.extras]
-fido = ["fido (>=4.2.1)"]
-integration-tests = ["bottle", "ephemeral-port-reserve", "pytest"]
-
-[[package]]
-name = "bravado-core"
-version = "6.1.1"
-description = "Library for adding Swagger support to clients and servers"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "bravado-core-6.1.1.tar.gz", hash = "sha256:8cf1f7bbac2f7c696d37e970253938b5be4ddec92c8d5e64400b17469c3714b4"},
-]
-
-[package.dependencies]
-jsonref = "*"
-jsonschema = {version = ">=2.5.1", extras = ["format-nongpl"]}
-msgpack = ">=0.5.2"
-python-dateutil = "*"
-pytz = "*"
-pyyaml = "*"
-requests = "*"
-simplejson = "*"
-six = "*"
-swagger-spec-validator = ">=2.0.1"
-
-[[package]]
-name = "cached-property"
-version = "1.5.2"
-description = "A decorator for caching properties in classes."
-optional = false
-python-versions = "*"
-files = [
-    {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"},
-    {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"},
-]
-
-[[package]]
-name = "certifi"
-version = "2024.2.2"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
-    {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
-]
-
-[[package]]
-name = "charset-normalizer"
-version = "3.3.2"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-optional = false
-python-versions = ">=3.7.0"
-files = [
-    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
-    # … dozens of per-platform wheel entries with sha256 hashes elided for readability …
-    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
-]
-
-[[package]]
-name = "click"
-version = "8.1.7"
-description = "Composable command line interface toolkit"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
-    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
-    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
-    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "fqdn"
-version = "1.5.1"
-description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
-optional = false
-python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
-files = [
-    {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"},
-    {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
-]
-
-[package.dependencies]
-cached-property = {version = ">=1.3.0", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "gitdb"
-version = "4.0.11"
-description = "Git Object Database"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
-    {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
-]
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.43"
-description = "GitPython is a Python library used to interact with Git repositories"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"},
-    {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"},
-]
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""}
-
-[package.extras]
-doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
-test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
-
-[[package]]
-name = "idna"
-version = "3.7"
-description = "Internationalized Domain Names in Applications (IDNA)"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
-    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
-]
-
-[[package]]
-name = "importlib-metadata"
-version = "6.7.0"
-description = "Read metadata from Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
-    {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
-]
-
-[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
-
-[[package]]
-name = "importlib-resources"
-version = "5.12.0"
-description = "Read resources from Python packages"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
-    {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
-]
-
-[package.dependencies]
-zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
-
-[[package]]
-name = "isoduration"
-version = "20.11.0"
-description = "Operations with ISO 8601 durations"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"},
-    {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"},
-]
-
-[package.dependencies]
-arrow = ">=0.15.0"
-
-[[package]]
-name = "jsonpointer"
-version = "2.4"
-description = "Identify specific nodes in a JSON document (RFC 6901)"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
-files = [
-    {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"},
-    {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"},
-]
-
-[[package]]
-name = "jsonref"
-version = "1.1.0"
-description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"},
-    {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"},
-]
-
-[[package]]
-name = "jsonschema"
-version = "4.17.3"
-description = "An implementation of JSON Schema validation for Python"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"},
-    {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"},
-]
-
-[package.dependencies]
-attrs = ">=17.4.0"
-fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
-importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
-isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""}
-pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
-pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
-rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""}
-typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
-uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
-webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""}
-
-[package.extras]
-format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
-format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
-
-[[package]]
-name = "monotonic"
-version = "1.6"
-description = "An implementation of time.monotonic() for Python 2 & < 3.3"
-optional = false
-python-versions = "*"
-files = [
-    {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
-    {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
-]
-
-[[package]]
-name = "msgpack"
-version = "1.0.5"
-description = "MessagePack serializer"
-optional = false
-python-versions = "*"
-files = [
-    # … dozens of per-platform wheel entries with sha256 hashes elided for readability …
-    {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"},
-]
-
-[[package]]
-name = "numpy"
-version = "1.21.1"
-description = "NumPy is the fundamental package for array computing with Python."
-optional = false
-python-versions = ">=3.7"
-files = [
-    # … dozens of per-platform wheel entries with sha256 hashes elided for readability …
-    {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"},
-]
-
-[[package]]
-name = "oauthlib"
-version = "3.2.2"
-description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
-    {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
-]
-
-[package.extras]
-rsa = ["cryptography (>=3.0.0)"]
-signals = ["blinker (>=1.4.0)"]
-signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "packaging"
@@ -542,562 +11,27 @@ files = [
     {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
 ]
 
-[[package]]
-name = "pandas"
-version = "1.1.5"
-description = "Powerful data structures for data analysis, time series, and statistics"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    # … dozens of per-platform wheel entries with sha256 hashes elided for readability …
-    {file = "pandas-1.1.5.tar.gz", hash = "sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"},
-]
-
-[package.dependencies]
-numpy = ">=1.15.4"
-python-dateutil = ">=2.7.3"
-pytz = ">=2017.2"
-
-[package.extras]
-test = ["hypothesis (>=3.58)", "pytest (>=4.0.2)", "pytest-xdist"]
-
-[[package]]
-name = "pkgutil-resolve-name"
-version = "1.3.10"
-description = "Resolve a name to an object."
-optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - -[[package]] -name = "protobuf" -version = "4.24.4" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, - {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, - {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, - {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, - {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, - {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, - {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, - {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, - {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, - {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, - {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, -] - -[[package]] -name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.dependencies] -typing-extensions = {version = "*", markers = "python_version <= \"3.7\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.7" -files 
= [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = 
"pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - [[package]] name = "setuptools" -version = "69.5.1" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simplejson" -version = "3.19.2" -description = "Simple, fast, extensible JSON encoder/decoder for Python" -optional = false -python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "simplejson-3.19.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3471e95110dcaf901db16063b2e40fb394f8a9e99b3fe9ee3acc6f6ef72183a2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3194cd0d2c959062b94094c0a9f8780ffd38417a5322450a0db0ca1a23e7fbd2"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:8a390e56a7963e3946ff2049ee1eb218380e87c8a0e7608f7f8790ba19390867"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1537b3dd62d8aae644f3518c407aa8469e3fd0f179cdf86c5992792713ed717a"}, - {file = "simplejson-3.19.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a8617625369d2d03766413bff9e64310feafc9fc4f0ad2b902136f1a5cd8c6b0"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:2c433a412e96afb9a3ce36fa96c8e61a757af53e9c9192c97392f72871e18e69"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f1c70249b15e4ce1a7d5340c97670a95f305ca79f376887759b43bb33288c973"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:287e39ba24e141b046812c880f4619d0ca9e617235d74abc27267194fc0c7835"}, - {file = "simplejson-3.19.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6f0a0b41dd05eefab547576bed0cf066595f3b20b083956b1405a6f17d1be6ad"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f98d918f7f3aaf4b91f2b08c0c92b1774aea113334f7cde4fe40e777114dbe6"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d74beca677623481810c7052926365d5f07393c72cbf62d6cce29991b676402"}, - {file = "simplejson-3.19.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f2398361508c560d0bf1773af19e9fe644e218f2a814a02210ac2c97ad70db0"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad331349b0b9ca6da86064a3599c425c7a21cd41616e175ddba0866da32df48"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:332c848f02d71a649272b3f1feccacb7e4f7e6de4a2e6dc70a32645326f3d428"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25785d038281cd106c0d91a68b9930049b6464288cea59ba95b35ee37c2d23a5"}, - {file = "simplejson-3.19.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18955c1da6fc39d957adfa346f75226246b6569e096ac9e40f67d102278c3bcb"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11cc3afd8160d44582543838b7e4f9aa5e97865322844b75d51bf4e0e413bb3e"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b01fda3e95d07a6148702a641e5e293b6da7863f8bc9b967f62db9461330562c"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:778331444917108fa8441f59af45886270d33ce8a23bfc4f9b192c0b2ecef1b3"}, - {file = "simplejson-3.19.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9eb117db8d7ed733a7317c4215c35993b815bf6aeab67523f1f11e108c040672"}, - {file = "simplejson-3.19.2-cp310-cp310-win32.whl", hash = "sha256:39b6d79f5cbfa3eb63a869639cfacf7c41d753c64f7801efc72692c1b2637ac7"}, - {file = "simplejson-3.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:5675e9d8eeef0aa06093c1ff898413ade042d73dc920a03e8cea2fb68f62445a"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed628c1431100b0b65387419551e822987396bee3c088a15d68446d92f554e0c"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adcb3332979cbc941b8fff07181f06d2b608625edc0a4d8bc3ffc0be414ad0c4"}, - {file = "simplejson-3.19.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08889f2f597ae965284d7b52a5c3928653a9406d88c93e3161180f0abc2433ba"}, - {file = 
"simplejson-3.19.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7938a78447174e2616be223f496ddccdbf7854f7bf2ce716dbccd958cc7d13"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a970a2e6d5281d56cacf3dc82081c95c1f4da5a559e52469287457811db6a79b"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554313db34d63eac3b3f42986aa9efddd1a481169c12b7be1e7512edebff8eaf"}, - {file = "simplejson-3.19.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d36081c0b1c12ea0ed62c202046dca11438bee48dd5240b7c8de8da62c620e9"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a3cd18e03b0ee54ea4319cdcce48357719ea487b53f92a469ba8ca8e39df285e"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66e5dc13bfb17cd6ee764fc96ccafd6e405daa846a42baab81f4c60e15650414"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:972a7833d4a1fcf7a711c939e315721a88b988553fc770a5b6a5a64bd6ebeba3"}, - {file = "simplejson-3.19.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3e74355cb47e0cd399ead3477e29e2f50e1540952c22fb3504dda0184fc9819f"}, - {file = "simplejson-3.19.2-cp311-cp311-win32.whl", hash = "sha256:1dd4f692304854352c3e396e9b5f0a9c9e666868dd0bdc784e2ac4c93092d87b"}, - {file = "simplejson-3.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:9300aee2a8b5992d0f4293d88deb59c218989833e3396c824b69ba330d04a589"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b8d940fd28eb34a7084877747a60873956893e377f15a32ad445fe66c972c3b8"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4969d974d9db826a2c07671273e6b27bc48e940738d768fa8f33b577f0978378"}, - {file = "simplejson-3.19.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c594642d6b13d225e10df5c16ee15b3398e21a35ecd6aee824f107a625690374"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f5a398b5e77bb01b23d92872255e1bcb3c0c719a3be40b8df146570fe7781a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176a1b524a3bd3314ed47029a86d02d5a95cc0bee15bd3063a1e1ec62b947de6"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3c7363a8cb8c5238878ec96c5eb0fc5ca2cb11fc0c7d2379863d342c6ee367a"}, - {file = "simplejson-3.19.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346820ae96aa90c7d52653539a57766f10f33dd4be609206c001432b59ddf89f"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de9a2792612ec6def556d1dc621fd6b2073aff015d64fba9f3e53349ad292734"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1c768e7584c45094dca4b334af361e43b0aaa4844c04945ac7d43379eeda9bc2"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9652e59c022e62a5b58a6f9948b104e5bb96d3b06940c6482588176f40f4914b"}, - {file = "simplejson-3.19.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9c1a4393242e321e344213a90a1e3bf35d2f624aa8b8f6174d43e3c6b0e8f6eb"}, - {file = "simplejson-3.19.2-cp312-cp312-win32.whl", hash = 
"sha256:7cb98be113911cb0ad09e5523d0e2a926c09a465c9abb0784c9269efe4f95917"}, - {file = "simplejson-3.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:6779105d2fcb7fcf794a6a2a233787f6bbd4731227333a072d8513b252ed374f"}, - {file = "simplejson-3.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:061e81ea2d62671fa9dea2c2bfbc1eec2617ae7651e366c7b4a2baf0a8c72cae"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4280e460e51f86ad76dc456acdbfa9513bdf329556ffc8c49e0200878ca57816"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11c39fbc4280d7420684494373b7c5904fa72a2b48ef543a56c2d412999c9e5d"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bccb3e88ec26ffa90f72229f983d3a5d1155e41a1171190fa723d4135523585b"}, - {file = "simplejson-3.19.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb5b50dc6dd671eb46a605a3e2eb98deb4a9af787a08fcdddabe5d824bb9664"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d94245caa3c61f760c4ce4953cfa76e7739b6f2cbfc94cc46fff6c050c2390c5"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0e5ffc763678d48ecc8da836f2ae2dd1b6eb2d27a48671066f91694e575173c"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d222a9ed082cd9f38b58923775152003765016342a12f08f8c123bf893461f28"}, - {file = "simplejson-3.19.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8434dcdd347459f9fd9c526117c01fe7ca7b016b6008dddc3c13471098f4f0dc"}, - {file = "simplejson-3.19.2-cp36-cp36m-win32.whl", hash = "sha256:c9ac1c2678abf9270e7228133e5b77c6c3c930ad33a3c1dfbdd76ff2c33b7b50"}, - {file = "simplejson-3.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:92c4a4a2b1f4846cd4364855cbac83efc48ff5a7d7c06ba014c792dd96483f6f"}, - {file = "simplejson-3.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0d551dc931638e2102b8549836a1632e6e7cf620af3d093a7456aa642bff601d"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73a8a4653f2e809049999d63530180d7b5a344b23a793502413ad1ecea9a0290"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40847f617287a38623507d08cbcb75d51cf9d4f9551dd6321df40215128325a3"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be893258d5b68dd3a8cba8deb35dc6411db844a9d35268a8d3793b9d9a256f80"}, - {file = "simplejson-3.19.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9eb3cff1b7d71aa50c89a0536f469cb8d6dcdd585d8f14fb8500d822f3bdee4"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d0f402e787e6e7ee7876c8b05e2fe6464820d9f35ba3f172e95b5f8b699f6c7f"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbbcc6b0639aa09b9649f36f1bcb347b19403fe44109948392fbb5ea69e48c3e"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2fc697be37585eded0c8581c4788fcfac0e3f84ca635b73a5bf360e28c8ea1a2"}, - {file = "simplejson-3.19.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b0a3eb6dd39cce23801a50c01a0976971498da49bc8a0590ce311492b82c44b"}, - {file = 
"simplejson-3.19.2-cp37-cp37m-win32.whl", hash = "sha256:49f9da0d6cd17b600a178439d7d2d57c5ef01f816b1e0e875e8e8b3b42db2693"}, - {file = "simplejson-3.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c87c22bd6a987aca976e3d3e23806d17f65426191db36d40da4ae16a6a494cbc"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e4c166f743bb42c5fcc60760fb1c3623e8fda94f6619534217b083e08644b46"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a48679310e1dd5c9f03481799311a65d343748fe86850b7fb41df4e2c00c087"}, - {file = "simplejson-3.19.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0521e0f07cb56415fdb3aae0bbd8701eb31a9dfef47bb57206075a0584ab2a2"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2d5119b1d7a1ed286b8af37357116072fc96700bce3bec5bb81b2e7057ab41"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c1467d939932901a97ba4f979e8f2642415fcf02ea12f53a4e3206c9c03bc17"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49aaf4546f6023c44d7e7136be84a03a4237f0b2b5fb2b17c3e3770a758fc1a0"}, - {file = "simplejson-3.19.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60848ab779195b72382841fc3fa4f71698a98d9589b0a081a9399904487b5832"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0436a70d8eb42bea4fe1a1c32d371d9bb3b62c637969cb33970ad624d5a3336a"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49e0e3faf3070abdf71a5c80a97c1afc059b4f45a5aa62de0c2ca0444b51669b"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ff836cd4041e16003549449cc0a5e372f6b6f871eb89007ab0ee18fb2800fded"}, - {file = "simplejson-3.19.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3848427b65e31bea2c11f521b6fc7a3145d6e501a1038529da2391aff5970f2f"}, - {file = "simplejson-3.19.2-cp38-cp38-win32.whl", hash = "sha256:3f39bb1f6e620f3e158c8b2eaf1b3e3e54408baca96a02fe891794705e788637"}, - {file = "simplejson-3.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:0405984f3ec1d3f8777c4adc33eac7ab7a3e629f3b1c05fdded63acc7cf01137"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:445a96543948c011a3a47c8e0f9d61e9785df2544ea5be5ab3bc2be4bd8a2565"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a8c3cc4f9dfc33220246760358c8265dad6e1104f25f0077bbca692d616d358"}, - {file = "simplejson-3.19.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af9c7e6669c4d0ad7362f79cb2ab6784d71147503e62b57e3d95c4a0f222c01c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:064300a4ea17d1cd9ea1706aa0590dcb3be81112aac30233823ee494f02cb78a"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9453419ea2ab9b21d925d0fd7e3a132a178a191881fab4169b6f96e118cc25bb"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e038c615b3906df4c3be8db16b3e24821d26c55177638ea47b3f8f73615111c"}, - {file = "simplejson-3.19.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16ca9c90da4b1f50f089e14485db8c20cbfff2d55424062791a7392b5a9b3ff9"}, - {file = 
"simplejson-3.19.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1018bd0d70ce85f165185d2227c71e3b1e446186f9fa9f971b69eee223e1e3cd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e8dd53a8706b15bc0e34f00e6150fbefb35d2fd9235d095b4f83b3c5ed4fa11d"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d022b14d7758bfb98405672953fe5c202ea8a9ccf9f6713c5bd0718eba286fd"}, - {file = "simplejson-3.19.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:febffa5b1eda6622d44b245b0685aff6fb555ce0ed734e2d7b1c3acd018a2cff"}, - {file = "simplejson-3.19.2-cp39-cp39-win32.whl", hash = "sha256:4edcd0bf70087b244ba77038db23cd98a1ace2f91b4a3ecef22036314d77ac23"}, - {file = "simplejson-3.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:aad7405c033d32c751d98d3a65801e2797ae77fac284a539f6c3a3e13005edc4"}, - {file = "simplejson-3.19.2-py3-none-any.whl", hash = "sha256:bcedf4cae0d47839fee7de344f96b5694ca53c786f28b5f773d4f0b265a159eb"}, - {file = "simplejson-3.19.2.tar.gz", hash = "sha256:9eb442a2442ce417801c912df68e1f6ccfcd41577ae7274953ab3ad24ef7d82c"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "swagger-spec-validator" -version = "3.0.3" -description = "Validation of Swagger specifications" -optional = false -python-versions = ">=3.7" -files = [ - {file = "swagger-spec-validator-3.0.3.tar.gz", hash = "sha256:16a5ce08c772824a77b1a4a05efc047d72eef1ed53fb969dfe0a18f437ac30a8"}, - {file = "swagger_spec_validator-3.0.3-py2.py3-none-any.whl", hash = "sha256:174b5de4ab0899df9a57d35c880aaa515511c4b8b578d9d519b09a9596537055"}, -] - -[package.dependencies] -jsonschema = "*" -pyyaml = "*" -typing-extensions = "*" - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", 
"flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "2.0.7" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.7" -files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "webcolors" -version = "1.13" -description = "A library for working with the color formats defined by HTML and CSS." -optional = false -python-versions = ">=3.7" -files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "websocket-client" -version = "1.6.1" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.7" -files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, -] - -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "zipp" -version = "3.15.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging 
(>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "cca1ac6c028d29b8529bf5aabc34ff44ad1af93b4f90ff93baf83ff35c9b3d2f" +content-hash = "1b9c16955b07ff5a0670e76782e13737332533ee0e6724e6b92df4df24069d2c" diff --git a/pyproject.toml b/pyproject.toml index 2fbf9ad04..a6b18d275 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,73 +11,10 @@ pattern = "default-unprefixed" [tool.poetry.dependencies] python = "^3.7" -# Python lack of functionalities from future versions -importlib-metadata = { version = "*", python = "<3.8" } -typing-extensions = ">=3.10.0" - # Utility packaging = "*" -click = ">=7.0" setuptools = { version = "*", python = ">=3.12" } -# Networking -bravado = "^11.0.0" -oauthlib = ">=2.1.0" -PyJWT = "*" -requests = ">=2.20.0" -requests-oauthlib = ">=1.0.0" -websocket-client = ">=0.35.0, !=1.0.0" -urllib3 = "*" -swagger-spec-validator = ">=2.7.4" -protobuf = "^4.0.0" - -# Built-in integrations -GitPython = ">=2.0.8" -psutil = "*" -pandas = "*" - -# Additional integrations -kedro-neptune = { version = "*", optional = true, python = ">=3.9,<3.12" } -neptune-detectron2 = { version = "*", optional = true, python = ">=3.7"} -neptune-fastai = { version = "*", optional = true } -neptune-lightgbm = { version = "*", optional = true } -pytorch-lightning = { version = "*", optional = true } -neptune-optuna = { version = "*", optional = true } -neptune-prophet = { version = "*", optional = true } -neptune-pytorch = { version = "*", optional = true } -neptune-sacred = { version = "*", optional = true } -neptune-sklearn = { version = "*", optional = true } -neptune-tensorflow-keras = { version = "*", optional = true } -neptune-tensorboard = { version = "*", optional = true } -neptune-xgboost = { version = "*", optional = true } -transformers = { version = "*", optional = true } -zenml = { version = "*", optional = true } -mosaicml = { version = "*", optional = true } -neptune-aws = { version = "*", optional = true } -neptune-mlflow = { version = "*", optional = true } -neptune-airflow = { version = "*", optional = true } - -[tool.poetry.extras] -aws = ["neptune-aws"] -detectron2 = ["neptune-detectron2"] -fastai = ["neptune-fastai"] -kedro = ["kedro-neptune"] -lightgbm = ["neptune-lightgbm"] -optuna = ["neptune-optuna"] -prophet = ["neptune-prophet"] -pytorch = ["neptune-pytorch"] -pytorch-lightning = ["pytorch-lightning"] -sacred = ["neptune-sacred"] -sklearn = ["neptune-sklearn"] -tensorflow-keras = ["neptune-tensorflow-keras"] -tensorboard = ["neptune-tensorboard"] 
-transformers = ["transformers"] -xgboost = ["neptune-xgboost"] -zenml = ["zenml"] -mosaicml = ["mosaicml"] -mlflow = ["neptune-mlflow"] -airflow = ["neptune-airflow"] -experimental = ["neptune-experimental"] [tool.poetry] authors = ["neptune.ai "] @@ -120,9 +57,6 @@ packages = [ "Tracker" = "https://github.com/neptune-ai/neptune-client/issues" "Documentation" = "https://docs.neptune.ai/" -[tool.poetry.scripts] -neptune = "neptune.cli.__main__:main" - [tool.black] line-length = 120 target-version = ['py37', 'py38', 'py39', 'py310', 'py311', 'py312'] @@ -159,158 +93,3 @@ check_untyped_defs = "True" warn_return_any = "True" show_error_codes = "True" warn_unused_ignores = "True" - -[[tool.mypy.overrides]] -module = [ - "*.impl", - "transformers.integrations", - "composer.loggers", - "pytorch_lightning.loggers", -] -ignore_missing_imports = "True" - -[[tool.mypy.overrides]] -module = [ - "neptune.api.exceptions_utils", - "neptune.objects.abstract", - "neptune.types.value_copy", - "neptune.types.namespace", - "neptune.types.file_set", - "neptune.types.mode", - "neptune.types.series.series_value", - "neptune.types.atoms.file", - "neptune.types.atoms.datetime", - "neptune.types.atoms.boolean", - "neptune.types.atoms.float", - "neptune.types.atoms.integer", - "neptune.types.atoms.string", - "neptune.types.atoms.artifact", - "neptune.types.series.float_series", - "neptune.types.series.string_series", - "neptune.types.series.file_series", - "neptune.types.series.series", - "neptune.types.sets.string_set", - "neptune.attributes.atoms.artifact", - "neptune.attributes.atoms.boolean", - "neptune.attributes.atoms.copiable_atom", - "neptune.attributes.atoms.datetime", - "neptune.attributes.atoms.file", - "neptune.attributes.atoms.float", - "neptune.attributes.atoms.integer", - "neptune.attributes.atoms.string", - "neptune.attributes.attribute", - "neptune.attributes.file_set", - "neptune.attributes.namespace", - "neptune.attributes.series.fetchable_series", - "neptune.attributes.series.file_series", - "neptune.attributes.series.float_series", - "neptune.attributes.series.series", - "neptune.attributes.series.string_series", - "neptune.attributes.sets.string_set", - "neptune.attributes.utils", - "neptune.internal.exceptions", - "neptune.internal.utils.git_info", - "neptune.internal.hardware.cgroup.cgroup_filesystem_reader", - "neptune.internal.hardware.cgroup.cgroup_monitor", - "neptune.internal.hardware.gauges.cpu", - "neptune.internal.hardware.gauges.gauge", - "neptune.internal.hardware.gauges.gauge_factory", - "neptune.internal.hardware.gauges.gpu", - "neptune.internal.hardware.gauges.memory", - "neptune.internal.hardware.gpu.gpu_monitor", - "neptune.internal.hardware.metrics.metric", - "neptune.internal.hardware.metrics.metrics_container", - "neptune.internal.hardware.metrics.metrics_factory", - "neptune.internal.hardware.metrics.reports.metric_reporter", - "neptune.internal.hardware.metrics.reports.metric_reporter_factory", - "neptune.internal.hardware.metrics.service.metric_service", - "neptune.internal.hardware.metrics.service.metric_service_factory", - "neptune.internal.hardware.resources.gpu_card_indices_provider", - "neptune.internal.hardware.resources.system_resource_info", - "neptune.internal.hardware.resources.system_resource_info_factory", - "neptune.internal.hardware.system.system_monitor", - "neptune.internal.oauth", - "neptune.internal.patches.bravado", - "neptune.internal.storage.datastream", - "neptune.internal.storage.storage_utils", - "neptune.internal.utils.utils", - 
"neptune.internal.warnings", - "neptune.internal.websockets.reconnecting_websocket", - "neptune.internal.websockets.websocket_client_adapter", - "neptune.exceptions", - "neptune.handler", - "neptune.integrations.python_logger", - "neptune.integrations.utils", - "neptune.internal.types.stringify_value", - "neptune.internal.types.file_types", - "neptune.internal.artifacts.drivers.local", - "neptune.internal.artifacts.drivers.s3", - "neptune.internal.artifacts.file_hasher", - "neptune.internal.artifacts.local_file_hash_storage", - "neptune.internal.artifacts.types", - "neptune.internal.backends.api_model", - "neptune.internal.backends.hosted_artifact_operations", - "neptune.internal.backends.hosted_client", - "neptune.internal.backends.hosted_file_operations", - "neptune.internal.backends.hosted_neptune_backend", - "neptune.internal.backends.neptune_backend", - "neptune.internal.backends.neptune_backend_mock", - "neptune.internal.backends.offline_neptune_backend", - "neptune.internal.backends.operation_api_name_visitor", - "neptune.internal.backends.operation_api_object_converter", - "neptune.internal.backends.operations_preprocessor", - "neptune.internal.backends.project_name_lookup", - "neptune.internal.backends.swagger_client_wrapper", - "neptune.internal.backends.utils", - "neptune.internal.backgroud_job_list", - "neptune.internal.background_job", - "neptune.internal.container_structure", - "neptune.internal.credentials", - "neptune.internal.hardware.gpu.gpu_monitor", - "neptune.internal.hardware.hardware_metric_reporting_job", - "neptune.internal.id_formats", - "neptune.internal.init.model", - "neptune.internal.init.model_version", - "neptune.internal.init.project", - "neptune.internal.init.run", - "neptune.internal.notebooks.comm", - "neptune.internal.notebooks.notebooks", - "neptune.internal.operation", - "neptune.internal.streams.std_capture_background_job", - "neptune.internal.streams.std_stream_capture_logger", - "neptune.internal.threading.daemon", - "neptune.internal.utils.deprecation", - "neptune.internal.utils.generic_attribute_mapper", - "neptune.internal.utils.git", - "neptune.internal.utils.hashing", - "neptune.internal.utils.images", - "neptune.internal.utils.limits", - "neptune.internal.utils.logger", - "neptune.internal.utils.ping_background_job", - "neptune.internal.utils.process_killer", - "neptune.internal.utils.run_state", - "neptune.internal.utils.runningmode", - "neptune.internal.utils.s3", - "neptune.internal.utils.source_code", - "neptune.internal.utils.traceback_job", - "neptune.internal.utils.uncaught_exception_handler", - "neptune.internal.websockets.websocket_signals_background_job", - "neptune.internal.websockets.websockets_factory", - "neptune.management.exceptions", - "neptune.management.internal.api", - "neptune.management.internal.dto", - "neptune.management.internal.utils", - "neptune.objects.neptune_object", - "neptune.objects.model", - "neptune.objects.model_version", - "neptune.objects.project", - "neptune.objects.run", - "neptune.types.type_casting", - "neptune.vendor.pynvml", - "neptune.api.requests_utils", - "neptune.version", - "neptune.internal.container_type", - "neptune.internal.patches", - "neptune.internal.utils", -] -ignore_errors = "True" diff --git a/src/neptune/__init__.py b/src/neptune/__init__.py index 58ab7fc09..8d06af532 100644 --- a/src/neptune/__init__.py +++ b/src/neptune/__init__.py @@ -13,111 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -"""Log and organize all your ML model metadata with neptune.ai. - -There are four kinds of Neptune objects: run, model, model version, and project. -They help you track, store, and visualize metadata related to your model-training experiments. -The package contains the functions and constructors needed to initialize the objects. -You can either create new objects or connect to existing ones (to, for example, fetch or add more metadata). - -Functions: - init_run() - init_model() - init_model_version() - init_project() - -Classes: - Run - Model - ModelVersion - Project - -Constants: - ANONYMOUS_API_TOKEN - -Tracking runs -------------- -A Neptune run tracks some things automatically during the execution of your model training -script, such as hardware consumption, source code, and Git information. You can also -assign any metadata to the run manually and organize it in a structure of your choosing. - ->>> run = neptune.init_run() ->>> run["some/structure"] = some_metadata - -Model registry --------------- -Create a model object to register a model: - ->>> model = neptune.init_model(key="MOD") ->>> model["signature"].upload("signature.json") - -Then create as many versions of the model as you need, tracking their metadata and -lifecycles separately: - ->>> model_version = neptune.init_model_version(model="PROJ-MOD") ->>> model_version["dataset_version"].track_files("./data/train.csv") ->>> model_version.change_stage("staging") - -Project metadata ----------------- -Initialize your entire Neptune project and log metadata on project-level: - ->>> project = neptune.init_project(project="ml-team/classification") ->>> project["datasets"].upload("./data/") - -Initializing with class constructor ------------------------------------ -You can also use the class constructor to initialize a Neptune object. - ->>> from neptune import Run ->>> run = Run() - ->>> from neptune import ModelVersion ->>> model_version = ModelVersion(with_id="PROJ-MOD-3") # connect to existing model version ->>> model_version.change_stage("production") - -Anonymous logging ------------------ -To try out Neptune without registering, you can pass the `ANONYMOUS_API_TOKEN` constant -to the `api_token` argument when initializing Neptune. - ->>> with neptune.init_run(api_token=neptune.ANONYMOUS_API_TOKEN) as run: -... ... - ---- - -Learn more in the docs: https://docs.neptune.ai/api/neptune/ -""" -__all__ = [ - "ANONYMOUS_API_TOKEN", - "init_model", - "init_model_version", - "init_project", - "init_run", - "Run", - "Model", - "ModelVersion", - "Project", - "__version__", -] - - -from neptune.constants import ANONYMOUS_API_TOKEN -from neptune.internal.extensions import load_extensions -from neptune.internal.patches import apply_patches -from neptune.objects import ( - Model, - ModelVersion, - Project, - Run, -) -from neptune.version import __version__ - -# Apply patches of external libraries -apply_patches() -load_extensions() - -init_run = Run -init_model = Model -init_model_version = ModelVersion -init_project = Project diff --git a/src/neptune/api/__init__.py b/src/neptune/api/__init__.py deleted file mode 100644 index 8d06af532..000000000 --- a/src/neptune/api/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/api/fetching_series_values.py b/src/neptune/api/fetching_series_values.py deleted file mode 100644 index f30fccaed..000000000 --- a/src/neptune/api/fetching_series_values.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("fetch_series_values",) - -from typing import ( - Any, - Callable, - Iterator, - Optional, - TypeVar, -) - -from neptune.api.models import ( - FloatPointValue, - StringPointValue, -) -from neptune.internal.backends.utils import construct_progress_bar -from neptune.typing import ProgressBarType - -PointValue = TypeVar("PointValue", StringPointValue, FloatPointValue) - - -def fetch_series_values( - getter: Callable[..., Any], path: str, step_size: int = 1000, progress_bar: Optional[ProgressBarType] = None -) -> Iterator[PointValue]: - first_batch = getter(from_step=None, limit=1) - data_count = 0 - total = first_batch.total - last_step_value = (first_batch.values[-1].step - 1) if first_batch.values else None - progress_bar = False if total < step_size else progress_bar - - if total <= 1: - yield from first_batch.values - return - - with construct_progress_bar(progress_bar, f"Fetching {path} values") as bar: - bar.update(by=data_count, total=total) - - while data_count < first_batch.total: - batch = getter(from_step=last_step_value, limit=step_size) - - bar.update(by=len(batch.values), total=total) - - yield from batch.values - - last_step_value = batch.values[-1].step if batch.values else None - data_count += len(batch.values) diff --git a/src/neptune/api/field_visitor.py b/src/neptune/api/field_visitor.py deleted file mode 100644 index acc4c8d51..000000000 --- a/src/neptune/api/field_visitor.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ("FieldToValueVisitor",) - -from datetime import datetime -from typing import ( - Any, - Optional, - Set, -) - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - FieldVisitor, - FileField, - FileSetField, - FloatField, - FloatSeriesField, - GitRefField, - ImageSeriesField, - IntField, - NotebookRefField, - ObjectStateField, - StringField, - StringSeriesField, - StringSetField, -) -from neptune.exceptions import MetadataInconsistency - - -class FieldToValueVisitor(FieldVisitor[Any]): - - def visit_float(self, field: FloatField) -> float: - return field.value - - def visit_int(self, field: IntField) -> int: - return field.value - - def visit_bool(self, field: BoolField) -> bool: - return field.value - - def visit_string(self, field: StringField) -> str: - return field.value - - def visit_datetime(self, field: DateTimeField) -> datetime: - return field.value - - def visit_file(self, field: FileField) -> None: - raise MetadataInconsistency("Cannot get value for file attribute. Use download() instead.") - - def visit_file_set(self, field: FileSetField) -> None: - raise MetadataInconsistency("Cannot get value for file set attribute. Use download() instead.") - - def visit_float_series(self, field: FloatSeriesField) -> Optional[float]: - return field.last - - def visit_string_series(self, field: StringSeriesField) -> Optional[str]: - return field.last - - def visit_image_series(self, field: ImageSeriesField) -> None: - raise MetadataInconsistency("Cannot get value for image series.") - - def visit_string_set(self, field: StringSetField) -> Set[str]: - return field.values - - def visit_git_ref(self, field: GitRefField) -> Optional[str]: - return field.commit.commit_id if field.commit is not None else None - - def visit_object_state(self, field: ObjectStateField) -> str: - return field.value - - def visit_notebook_ref(self, field: NotebookRefField) -> Optional[str]: - return field.notebook_name - - def visit_artifact(self, field: ArtifactField) -> str: - return field.hash diff --git a/src/neptune/api/models.py b/src/neptune/api/models.py deleted file mode 100644 index bff32d838..000000000 --- a/src/neptune/api/models.py +++ /dev/null @@ -1,865 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -__all__ = ( - "FileEntry", - "Field", - "FieldType", - "GitCommit", - "LeaderboardEntry", - "LeaderboardEntriesSearchResult", - "FieldVisitor", - "FloatField", - "IntField", - "BoolField", - "StringField", - "DateTimeField", - "FileField", - "FileSetField", - "FloatSeriesField", - "StringSeriesField", - "ImageSeriesField", - "StringSetField", - "GitRefField", - "ObjectStateField", - "NotebookRefField", - "ArtifactField", - "FieldDefinition", - "FloatSeriesValues", - "FloatPointValue", - "StringSeriesValues", - "StringPointValue", - "ImageSeriesValues", - "QueryFieldDefinitionsResult", - "NextPage", - "QueryFieldsResult", -) - -import abc -import re -from dataclasses import dataclass -from dataclasses import field as dataclass_field -from datetime import ( - datetime, - timezone, -) -from enum import Enum -from typing import ( - Any, - ClassVar, - Dict, - Generic, - List, - Optional, - Set, - Type, - TypeVar, -) - -from neptune.api.proto.neptune_pb.api.model.attributes_pb2 import ProtoAttributeDefinitionDTO -from neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2 import ( - ProtoAttributeDTO, - ProtoAttributesDTO, - ProtoBoolAttributeDTO, - ProtoDatetimeAttributeDTO, - ProtoFloatAttributeDTO, - ProtoFloatSeriesAttributeDTO, - ProtoIntAttributeDTO, - ProtoLeaderboardEntriesSearchResultDTO, - ProtoStringAttributeDTO, - ProtoStringSetAttributeDTO, -) -from neptune.internal.utils.iso_dates import parse_iso_date -from neptune.internal.utils.run_state import RunState - -Ret = TypeVar("Ret") - - -@dataclass -class FileEntry: - name: str - size: int - mtime: datetime - file_type: str - - @classmethod - def from_dto(cls, file_dto: Any) -> "FileEntry": - return cls(name=file_dto.name, size=file_dto.size, mtime=file_dto.mtime, file_type=file_dto.fileType) - - -class FieldType(Enum): - FLOAT = "float" - INT = "int" - BOOL = "bool" - STRING = "string" - DATETIME = "datetime" - FILE = "file" - FILE_SET = "fileSet" - FLOAT_SERIES = "floatSeries" - STRING_SERIES = "stringSeries" - IMAGE_SERIES = "imageSeries" - STRING_SET = "stringSet" - GIT_REF = "gitRef" - OBJECT_STATE = "experimentState" - NOTEBOOK_REF = "notebookRef" - ARTIFACT = "artifact" - - -@dataclass -class Field(abc.ABC): - path: str - type: ClassVar[FieldType] = dataclass_field(init=False) - _registry: ClassVar[Dict[str, Type[Field]]] = {} - - def __init_subclass__(cls, *args: Any, field_type: FieldType, **kwargs: Any) -> None: - super().__init_subclass__(*args, **kwargs) - cls.type = field_type - cls._registry[field_type.value] = cls - - @classmethod - def by_type(cls, field_type: FieldType) -> Type[Field]: - return cls._registry[field_type.value] - - @abc.abstractmethod - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: ... 
- - @staticmethod - def from_dict(data: Dict[str, Any]) -> Field: - field_type = data["type"] - return Field._registry[field_type].from_dict(data[f"{field_type}Properties"]) - - @staticmethod - def from_model(model: Any) -> Field: - field_type = str(model.type) - return Field._registry[field_type].from_model(model.__getattr__(f"{field_type}Properties")) - - @staticmethod - def from_proto(data: Any) -> Field: - field_type = str(data.type) - return Field._registry[field_type].from_proto(data.__getattribute__(f"{camel_to_snake(field_type)}_properties")) - - -def camel_to_snake(name: str) -> str: - # Insert an underscore before any uppercase letters and convert the string to lowercase - s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) - # Handle the case where there are uppercase letters in the middle of the name - return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() - - -class FieldVisitor(Generic[Ret], abc.ABC): - - def visit(self, field: Field) -> Ret: - return field.accept(self) - - @abc.abstractmethod - def visit_float(self, field: FloatField) -> Ret: ... - - @abc.abstractmethod - def visit_int(self, field: IntField) -> Ret: ... - - @abc.abstractmethod - def visit_bool(self, field: BoolField) -> Ret: ... - - @abc.abstractmethod - def visit_string(self, field: StringField) -> Ret: ... - - @abc.abstractmethod - def visit_datetime(self, field: DateTimeField) -> Ret: ... - - @abc.abstractmethod - def visit_file(self, field: FileField) -> Ret: ... - - @abc.abstractmethod - def visit_file_set(self, field: FileSetField) -> Ret: ... - - @abc.abstractmethod - def visit_float_series(self, field: FloatSeriesField) -> Ret: ... - - @abc.abstractmethod - def visit_string_series(self, field: StringSeriesField) -> Ret: ... - - @abc.abstractmethod - def visit_image_series(self, field: ImageSeriesField) -> Ret: ... - - @abc.abstractmethod - def visit_string_set(self, field: StringSetField) -> Ret: ... - - @abc.abstractmethod - def visit_git_ref(self, field: GitRefField) -> Ret: ... - - @abc.abstractmethod - def visit_object_state(self, field: ObjectStateField) -> Ret: ... - - @abc.abstractmethod - def visit_notebook_ref(self, field: NotebookRefField) -> Ret: ... - - @abc.abstractmethod - def visit_artifact(self, field: ArtifactField) -> Ret: ... 
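# Illustrative sketch, not part of the removed sources or of this patch:
# Field.from_proto above selects which oneof property of ProtoAttributeDTO to
# read by snake-casing the wire-format type tag with camel_to_snake() and
# appending "_properties". The self-contained demo below reuses the same two
# regexes to show that mapping; camel_to_snake_demo is a stand-in name used
# here so the sketch does not shadow the deleted helper.

import re


def camel_to_snake_demo(name: str) -> str:
    # Underscore before an uppercase letter that starts a capitalized word...
    s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    # ...and between a lowercase letter or digit and a following uppercase letter.
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()


# FieldType values resolve to ProtoAttributeDTO property names (compare the
# generated leaderboard_entries_pb2.pyi stubs later in this patch):
assert camel_to_snake_demo("floatSeries") + "_properties" == "float_series_properties"
assert camel_to_snake_demo("stringSet") + "_properties" == "string_set_properties"
assert camel_to_snake_demo("datetime") + "_properties" == "datetime_properties"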
- - -@dataclass -class FloatField(Field, field_type=FieldType.FLOAT): - value: float - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_float(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FloatField: - return FloatField(path=data["attributeName"], value=float(data["value"])) - - @staticmethod - def from_model(model: Any) -> FloatField: - return FloatField(path=model.attributeName, value=model.value) - - @staticmethod - def from_proto(data: ProtoFloatAttributeDTO) -> FloatField: - return FloatField(path=data.attribute_name, value=data.value) - - -@dataclass -class IntField(Field, field_type=FieldType.INT): - value: int - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_int(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> IntField: - return IntField(path=data["attributeName"], value=int(data["value"])) - - @staticmethod - def from_model(model: Any) -> IntField: - return IntField(path=model.attributeName, value=model.value) - - @staticmethod - def from_proto(data: ProtoIntAttributeDTO) -> IntField: - return IntField(path=data.attribute_name, value=data.value) - - -@dataclass -class BoolField(Field, field_type=FieldType.BOOL): - value: bool - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_bool(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> BoolField: - return BoolField(path=data["attributeName"], value=bool(data["value"])) - - @staticmethod - def from_model(model: Any) -> BoolField: - return BoolField(path=model.attributeName, value=model.value) - - @staticmethod - def from_proto(data: ProtoBoolAttributeDTO) -> BoolField: - return BoolField(path=data.attribute_name, value=data.value) - - -@dataclass -class StringField(Field, field_type=FieldType.STRING): - value: str - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_string(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> StringField: - return StringField(path=data["attributeName"], value=str(data["value"])) - - @staticmethod - def from_model(model: Any) -> StringField: - return StringField(path=model.attributeName, value=model.value) - - @staticmethod - def from_proto(data: ProtoStringAttributeDTO) -> StringField: - return StringField(path=data.attribute_name, value=data.value) - - -@dataclass -class DateTimeField(Field, field_type=FieldType.DATETIME): - value: datetime - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_datetime(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> DateTimeField: - return DateTimeField(path=data["attributeName"], value=parse_iso_date(data["value"])) - - @staticmethod - def from_model(model: Any) -> DateTimeField: - return DateTimeField(path=model.attributeName, value=parse_iso_date(model.value)) - - @staticmethod - def from_proto(data: ProtoDatetimeAttributeDTO) -> DateTimeField: - return DateTimeField( - path=data.attribute_name, value=datetime.fromtimestamp(data.value / 1000.0, tz=timezone.utc) - ) - - -@dataclass -class FileField(Field, field_type=FieldType.FILE): - name: str - ext: str - size: int - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_file(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FileField: - return FileField(path=data["attributeName"], name=data["name"], ext=data["ext"], size=int(data["size"])) - - @staticmethod - def from_model(model: Any) -> FileField: - return FileField(path=model.attributeName, name=model.name, 
ext=model.ext, size=model.size) - - @staticmethod - def from_proto(data: Any) -> FileField: - raise NotImplementedError() - - -@dataclass -class FileSetField(Field, field_type=FieldType.FILE_SET): - size: int - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_file_set(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FileSetField: - return FileSetField(path=data["attributeName"], size=int(data["size"])) - - @staticmethod - def from_model(model: Any) -> FileSetField: - return FileSetField(path=model.attributeName, size=model.size) - - @staticmethod - def from_proto(data: Any) -> FileSetField: - raise NotImplementedError() - - -@dataclass -class FloatSeriesField(Field, field_type=FieldType.FLOAT_SERIES): - last: Optional[float] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_float_series(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FloatSeriesField: - last = float(data["last"]) if "last" in data else None - return FloatSeriesField(path=data["attributeName"], last=last) - - @staticmethod - def from_model(model: Any) -> FloatSeriesField: - return FloatSeriesField(path=model.attributeName, last=model.last) - - @staticmethod - def from_proto(data: ProtoFloatSeriesAttributeDTO) -> FloatSeriesField: - last = data.last if data.HasField("last") else None - return FloatSeriesField(path=data.attribute_name, last=last) - - -@dataclass -class StringSeriesField(Field, field_type=FieldType.STRING_SERIES): - last: Optional[str] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_string_series(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> StringSeriesField: - last = str(data["last"]) if "last" in data else None - return StringSeriesField(path=data["attributeName"], last=last) - - @staticmethod - def from_model(model: Any) -> StringSeriesField: - return StringSeriesField(path=model.attributeName, last=model.last) - - @staticmethod - def from_proto(data: Any) -> StringSeriesField: - raise NotImplementedError() - - -@dataclass -class ImageSeriesField(Field, field_type=FieldType.IMAGE_SERIES): - last_step: Optional[float] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_image_series(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> ImageSeriesField: - last_step = float(data["lastStep"]) if "lastStep" in data else None - return ImageSeriesField(path=data["attributeName"], last_step=last_step) - - @staticmethod - def from_model(model: Any) -> ImageSeriesField: - return ImageSeriesField(path=model.attributeName, last_step=model.lastStep) - - @staticmethod - def from_proto(data: Any) -> ImageSeriesField: - raise NotImplementedError() - - -@dataclass -class StringSetField(Field, field_type=FieldType.STRING_SET): - values: Set[str] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_string_set(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> StringSetField: - return StringSetField(path=data["attributeName"], values=set(map(str, data["values"]))) - - @staticmethod - def from_model(model: Any) -> StringSetField: - return StringSetField(path=model.attributeName, values=set(model.values)) - - @staticmethod - def from_proto(data: ProtoStringSetAttributeDTO) -> StringSetField: - return StringSetField(path=data.attribute_name, values=set(data.value)) - - -@dataclass -class GitCommit: - commit_id: Optional[str] - - @staticmethod - def from_dict(data: Dict[str, Any]) -> GitCommit: - commit_id = 
str(data["commitId"]) if "commitId" in data else None - return GitCommit(commit_id=commit_id) - - @staticmethod - def from_model(model: Any) -> GitCommit: - return GitCommit(commit_id=model.commitId) - - @staticmethod - def from_proto(data: Any) -> GitCommit: - raise NotImplementedError() - - -@dataclass -class GitRefField(Field, field_type=FieldType.GIT_REF): - commit: Optional[GitCommit] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_git_ref(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> GitRefField: - commit = GitCommit.from_dict(data["commit"]) if "commit" in data else None - return GitRefField(path=data["attributeName"], commit=commit) - - @staticmethod - def from_model(model: Any) -> GitRefField: - commit = GitCommit.from_model(model.commit) if model.commit is not None else None - return GitRefField(path=model.attributeName, commit=commit) - - @staticmethod - def from_proto(data: ProtoAttributeDTO) -> GitRefField: - raise NotImplementedError() - - -@dataclass -class ObjectStateField(Field, field_type=FieldType.OBJECT_STATE): - value: str - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_object_state(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> ObjectStateField: - value = RunState.from_api(str(data["value"])).value - return ObjectStateField(path=data["attributeName"], value=value) - - @staticmethod - def from_model(model: Any) -> ObjectStateField: - value = RunState.from_api(str(model.value)).value - return ObjectStateField(path=model.attributeName, value=value) - - @staticmethod - def from_proto(data: Any) -> ObjectStateField: - raise NotImplementedError() - - -@dataclass -class NotebookRefField(Field, field_type=FieldType.NOTEBOOK_REF): - notebook_name: Optional[str] - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_notebook_ref(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> NotebookRefField: - notebook_name = str(data["notebookName"]) if "notebookName" in data else None - return NotebookRefField(path=data["attributeName"], notebook_name=notebook_name) - - @staticmethod - def from_model(model: Any) -> NotebookRefField: - return NotebookRefField(path=model.attributeName, notebook_name=model.notebookName) - - @staticmethod - def from_proto(data: Any) -> NotebookRefField: - raise NotImplementedError() - - -@dataclass -class ArtifactField(Field, field_type=FieldType.ARTIFACT): - hash: str - - def accept(self, visitor: FieldVisitor[Ret]) -> Ret: - return visitor.visit_artifact(self) - - @staticmethod - def from_dict(data: Dict[str, Any]) -> ArtifactField: - return ArtifactField(path=data["attributeName"], hash=str(data["hash"])) - - @staticmethod - def from_model(model: Any) -> ArtifactField: - return ArtifactField(path=model.attributeName, hash=model.hash) - - @staticmethod - def from_proto(data: Any) -> ArtifactField: - raise NotImplementedError() - - -@dataclass -class LeaderboardEntry: - object_id: str - fields: List[Field] - - @staticmethod - def from_dict(data: Dict[str, Any]) -> LeaderboardEntry: - return LeaderboardEntry( - object_id=data["experimentId"], fields=[Field.from_dict(field) for field in data["attributes"]] - ) - - @staticmethod - def from_model(model: Any) -> LeaderboardEntry: - return LeaderboardEntry( - object_id=model.experimentId, fields=[Field.from_model(field) for field in model.attributes] - ) - - @staticmethod - def from_proto(data: ProtoAttributesDTO) -> LeaderboardEntry: - with_proto_support = { - 
FieldType.STRING.value, - FieldType.BOOL.value, - FieldType.INT.value, - FieldType.FLOAT.value, - FieldType.DATETIME.value, - FieldType.STRING_SET.value, - FieldType.FLOAT_SERIES.value, - } - - return LeaderboardEntry( - object_id=data.experiment_id, - fields=[Field.from_proto(field) for field in data.attributes if str(field.type) in with_proto_support], - ) - - -@dataclass -class LeaderboardEntriesSearchResult: - entries: List[LeaderboardEntry] - matching_item_count: int - - @staticmethod - def from_dict(result: Dict[str, Any]) -> LeaderboardEntriesSearchResult: - return LeaderboardEntriesSearchResult( - entries=[LeaderboardEntry.from_dict(entry) for entry in result.get("entries", [])], - matching_item_count=result["matchingItemCount"], - ) - - @staticmethod - def from_model(result: Any) -> LeaderboardEntriesSearchResult: - return LeaderboardEntriesSearchResult( - entries=[LeaderboardEntry.from_model(entry) for entry in result.entries], - matching_item_count=result.matchingItemCount, - ) - - @staticmethod - def from_proto(data: ProtoLeaderboardEntriesSearchResultDTO) -> LeaderboardEntriesSearchResult: - return LeaderboardEntriesSearchResult( - entries=[LeaderboardEntry.from_proto(entry) for entry in data.entries], - matching_item_count=data.matching_item_count, - ) - - -@dataclass -class NextPage: - limit: Optional[int] - next_page_token: Optional[str] - - @staticmethod - def from_dict(data: Dict[str, Any]) -> NextPage: - return NextPage(limit=data.get("limit"), next_page_token=data.get("nextPageToken")) - - @staticmethod - def from_model(model: Any) -> NextPage: - return NextPage(limit=model.limit, next_page_token=model.nextPageToken) - - @staticmethod - def from_proto(data: Any) -> NextPage: - return NextPage(limit=data.limit, next_page_token=data.nextPageToken) - - def to_dto(self) -> Dict[str, Any]: - return { - "limit": self.limit, - "nextPageToken": self.next_page_token, - } - - -@dataclass -class QueryFieldsExperimentResult: - object_id: str - object_key: str - fields: List[Field] - - # Any field the type of which is not in this set will not be - # returned to the user. Applies to protobuf calls only. 
- PROTO_SUPPORTED_FIELD_TYPES = { - FieldType.STRING.value, - FieldType.BOOL.value, - FieldType.INT.value, - FieldType.FLOAT.value, - FieldType.DATETIME.value, - FieldType.STRING_SET.value, - FieldType.FLOAT_SERIES.value, - } - - @staticmethod - def from_dict(data: Dict[str, Any]) -> QueryFieldsExperimentResult: - return QueryFieldsExperimentResult( - object_id=data["experimentId"], - object_key=data["experimentShortId"], - fields=[Field.from_dict(field) for field in data["attributes"]], - ) - - @staticmethod - def from_model(model: Any) -> QueryFieldsExperimentResult: - return QueryFieldsExperimentResult( - object_id=model.experimentId, - object_key=model.experimentShortId, - fields=[Field.from_model(field) for field in model.attributes], - ) - - @staticmethod - def from_proto(data: Any) -> QueryFieldsExperimentResult: - return QueryFieldsExperimentResult( - object_id=data.experimentId, - object_key=data.experimentShortId, - fields=[ - Field.from_proto(field) - for field in data.attributes - if field.type in QueryFieldsExperimentResult.PROTO_SUPPORTED_FIELD_TYPES - ], - ) - - -@dataclass -class QueryFieldsResult: - entries: List[QueryFieldsExperimentResult] - next_page: NextPage - - @staticmethod - def from_dict(data: Dict[str, Any]) -> QueryFieldsResult: - return QueryFieldsResult( - entries=[QueryFieldsExperimentResult.from_dict(entry) for entry in data["entries"]], - next_page=NextPage.from_dict(data["nextPage"]), - ) - - @staticmethod - def from_model(model: Any) -> QueryFieldsResult: - return QueryFieldsResult( - entries=[QueryFieldsExperimentResult.from_model(entry) for entry in model.entries], - next_page=NextPage.from_model(model.nextPage), - ) - - @staticmethod - def from_proto(data: Any) -> QueryFieldsResult: - return QueryFieldsResult( - entries=[QueryFieldsExperimentResult.from_proto(entry) for entry in data.entries], - next_page=NextPage.from_proto(data.nextPage), - ) - - -@dataclass -class QueryFieldDefinitionsResult: - entries: List[FieldDefinition] - next_page: NextPage - - @staticmethod - def from_dict(data: Dict[str, Any]) -> QueryFieldDefinitionsResult: - return QueryFieldDefinitionsResult( - entries=[FieldDefinition.from_dict(entry) for entry in data["entries"]], - next_page=NextPage.from_dict(data["nextPage"]), - ) - - @staticmethod - def from_model(model: Any) -> QueryFieldDefinitionsResult: - return QueryFieldDefinitionsResult( - entries=[FieldDefinition.from_model(entry) for entry in model.entries], - next_page=NextPage.from_model(model.nextPage), - ) - - @staticmethod - def from_proto(data: Any) -> QueryFieldDefinitionsResult: - raise NotImplementedError() - - -@dataclass -class FieldDefinition: - path: str - type: FieldType - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FieldDefinition: - return FieldDefinition(path=data["name"], type=FieldType(data["type"])) - - @staticmethod - def from_model(model: Any) -> FieldDefinition: - return FieldDefinition(path=model.name, type=FieldType(model.type)) - - @staticmethod - def from_proto(data: ProtoAttributeDefinitionDTO) -> FieldDefinition: - return FieldDefinition(path=data.name, type=FieldType(data.type)) - - -@dataclass -class FloatSeriesValues: - total: int - values: List[FloatPointValue] - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FloatSeriesValues: - return FloatSeriesValues( - total=data["totalItemCount"], values=[FloatPointValue.from_dict(value) for value in data["values"]] - ) - - @staticmethod - def from_model(model: Any) -> FloatSeriesValues: - return FloatSeriesValues( - 
total=model.totalItemCount, values=[FloatPointValue.from_model(value) for value in model.values] - ) - - @staticmethod - def from_proto(data: Any) -> FloatSeriesValues: - return FloatSeriesValues( - total=data.total_item_count, values=[FloatPointValue.from_proto(value) for value in data.values] - ) - - -@dataclass -class FloatPointValue: - timestamp: datetime - value: float - step: float - - @staticmethod - def from_dict(data: Dict[str, Any]) -> FloatPointValue: - return FloatPointValue( - timestamp=datetime.fromtimestamp(data["timestampMillis"] / 1000.0, tz=timezone.utc), - value=float(data["value"]), - step=float(data["step"]), - ) - - @staticmethod - def from_model(model: Any) -> FloatPointValue: - return FloatPointValue( - timestamp=datetime.fromtimestamp(model.timestampMillis / 1000.0, tz=timezone.utc), - value=model.value, - step=model.step, - ) - - @staticmethod - def from_proto(data: Any) -> FloatPointValue: - return FloatPointValue( - timestamp=datetime.fromtimestamp(data.timestamp_millis / 1000.0, tz=timezone.utc), - value=data.value, - step=data.step, - ) - - -@dataclass -class StringSeriesValues: - total: int - values: List[StringPointValue] - - @staticmethod - def from_dict(data: Dict[str, Any]) -> StringSeriesValues: - return StringSeriesValues( - total=data["totalItemCount"], values=[StringPointValue.from_dict(value) for value in data["values"]] - ) - - @staticmethod - def from_model(model: Any) -> StringSeriesValues: - return StringSeriesValues( - total=model.totalItemCount, values=[StringPointValue.from_model(value) for value in model.values] - ) - - @staticmethod - def from_proto(data: Any) -> StringSeriesValues: - raise NotImplementedError() - - -@dataclass -class StringPointValue: - timestamp: datetime - step: float - value: str - - @staticmethod - def from_dict(data: Dict[str, Any]) -> StringPointValue: - return StringPointValue( - timestamp=datetime.fromtimestamp(data["timestampMillis"] / 1000.0, tz=timezone.utc), - value=str(data["value"]), - step=float(data["step"]), - ) - - @staticmethod - def from_model(model: Any) -> StringPointValue: - return StringPointValue( - timestamp=datetime.fromtimestamp(model.timestampMillis / 1000.0, tz=timezone.utc), - value=model.value, - step=model.step, - ) - - @staticmethod - def from_proto(data: Any) -> StringPointValue: - raise NotImplementedError() - - -@dataclass -class ImageSeriesValues: - total: int - - @staticmethod - def from_dict(data: Dict[str, Any]) -> ImageSeriesValues: - return ImageSeriesValues(total=data["totalItemCount"]) - - @staticmethod - def from_model(model: Any) -> ImageSeriesValues: - return ImageSeriesValues(total=model.totalItemCount) - - @staticmethod - def from_proto(data: Any) -> ImageSeriesValues: - raise NotImplementedError() diff --git a/src/neptune/api/pagination.py b/src/neptune/api/pagination.py deleted file mode 100644 index 4d7646c71..000000000 --- a/src/neptune/api/pagination.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("paginate_over",) - -import abc -import itertools -from dataclasses import dataclass -from typing import ( - Any, - Callable, - Iterator, - List, - Optional, - TypeVar, -) - -from typing_extensions import Protocol - -from neptune.api.models import NextPage - - -@dataclass -class WithPagination(abc.ABC): - next_page: Optional[NextPage] - - -T = TypeVar("T", bound=WithPagination) -Entry = TypeVar("Entry") - - -class Paginatable(Protocol): - def __call__(self, *, next_page: Optional[NextPage] = None, **kwargs: Any) -> Any: ... - - -def paginate_over( - getter: Paginatable, - extract_entries: Callable[[T], List[Entry]], - page_size: int = 50, - limit: Optional[int] = None, - **kwargs: Any, -) -> Iterator[Entry]: - """ - Generic approach to pagination via `NextPage` - """ - counter = 0 - data = getter(**kwargs, next_page=NextPage(limit=page_size, next_page_token=None)) - results = extract_entries(data) - if limit is not None: - counter = len(results[:limit]) - - yield from itertools.islice(results, limit) - - while data.next_page is not None and data.next_page.next_page_token is not None: - to_fetch = page_size - if limit is not None: - if counter >= limit: - break - to_fetch = min(page_size, limit - counter) - - data = getter(**kwargs, next_page=NextPage(limit=to_fetch, next_page_token=data.next_page.next_page_token)) - results = extract_entries(data) - if limit is not None: - counter += len(results[:to_fetch]) - - yield from itertools.islice(results, to_fetch) diff --git a/src/neptune/api/proto/__init__.py b/src/neptune/api/proto/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/api/proto/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/api/proto/neptune_pb/__init__.py b/src/neptune/api/proto/neptune_pb/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/api/proto/neptune_pb/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/api/proto/neptune_pb/api/__init__.py b/src/neptune/api/proto/neptune_pb/api/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/api/proto/neptune_pb/api/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. 
z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/api/proto/neptune_pb/api/model/__init__.py b/src/neptune/api/proto/neptune_pb/api/model/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.py b/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.py deleted file mode 100644 index 846de5969..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: neptune_pb/api/model/attributes.proto -# Protobuf Python Version: 4.25.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from neptune.api.proto.neptune_pb.api.model import leaderboard_entries_pb2 as neptune__pb_dot_api_dot_model_dot_leaderboard__entries__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%neptune_pb/api/model/attributes.proto\x12\x11neptune.api.model\x1a.neptune_pb/api/model/leaderboard_entries.proto\"a\n\x1eProtoAttributesSearchResultDTO\x12?\n\x07\x65ntries\x18\x01 \x03(\x0b\x32..neptune.api.model.ProtoAttributeDefinitionDTO\"9\n\x1bProtoAttributeDefinitionDTO\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\"\xa3\x01\n\x1dProtoQueryAttributesResultDTO\x12K\n\x07\x65ntries\x18\x01 \x03(\x0b\x32:.neptune.api.model.ProtoQueryAttributesExperimentResultDTO\x12\x35\n\x08nextPage\x18\x02 \x01(\x0b\x32#.neptune.api.model.ProtoNextPageDTO\"^\n\x10ProtoNextPageDTO\x12\x1a\n\rnextPageToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05limit\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x10\n\x0e_nextPageTokenB\x08\n\x06_limit\"\x94\x01\n\'ProtoQueryAttributesExperimentResultDTO\x12\x14\n\x0c\x65xperimentId\x18\x01 \x01(\t\x12\x19\n\x11\x65xperimentShortId\x18\x02 \x01(\t\x12\x38\n\nattributes\x18\x03 \x03(\x0b\x32$.neptune.api.model.ProtoAttributeDTOB4\n0ml.neptune.leaderboard.api.model.proto.generatedP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'neptune.api.proto.neptune_pb.api.model.attributes_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'\n0ml.neptune.leaderboard.api.model.proto.generatedP\001' - _globals['_PROTOATTRIBUTESSEARCHRESULTDTO']._serialized_start=108 - _globals['_PROTOATTRIBUTESSEARCHRESULTDTO']._serialized_end=205 - _globals['_PROTOATTRIBUTEDEFINITIONDTO']._serialized_start=207 - _globals['_PROTOATTRIBUTEDEFINITIONDTO']._serialized_end=264 - _globals['_PROTOQUERYATTRIBUTESRESULTDTO']._serialized_start=267 - _globals['_PROTOQUERYATTRIBUTESRESULTDTO']._serialized_end=430 - _globals['_PROTONEXTPAGEDTO']._serialized_start=432 - _globals['_PROTONEXTPAGEDTO']._serialized_end=526 - _globals['_PROTOQUERYATTRIBUTESEXPERIMENTRESULTDTO']._serialized_start=529 - _globals['_PROTOQUERYATTRIBUTESEXPERIMENTRESULTDTO']._serialized_end=677 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.pyi b/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.pyi deleted file mode 100644 index 22c51bcdf..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/attributes_pb2.pyi +++ /dev/null @@ -1,119 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.message -import neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2 -import sys -import typing - -if sys.version_info >= (3, 8): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing_extensions.final -class ProtoAttributesSearchResultDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ENTRIES_FIELD_NUMBER: builtins.int - @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProtoAttributeDefinitionDTO]: ... - def __init__( - self, - *, - entries: collections.abc.Iterable[global___ProtoAttributeDefinitionDTO] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["entries", b"entries"]) -> None: ... - -global___ProtoAttributesSearchResultDTO = ProtoAttributesSearchResultDTO - -@typing_extensions.final -class ProtoAttributeDefinitionDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - NAME_FIELD_NUMBER: builtins.int - TYPE_FIELD_NUMBER: builtins.int - name: builtins.str - type: builtins.str - def __init__( - self, - *, - name: builtins.str = ..., - type: builtins.str = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "type", b"type"]) -> None: ... - -global___ProtoAttributeDefinitionDTO = ProtoAttributeDefinitionDTO - -@typing_extensions.final -class ProtoQueryAttributesResultDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ENTRIES_FIELD_NUMBER: builtins.int - NEXTPAGE_FIELD_NUMBER: builtins.int - @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProtoQueryAttributesExperimentResultDTO]: ... - @property - def nextPage(self) -> global___ProtoNextPageDTO: ... - def __init__( - self, - *, - entries: collections.abc.Iterable[global___ProtoQueryAttributesExperimentResultDTO] | None = ..., - nextPage: global___ProtoNextPageDTO | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["nextPage", b"nextPage"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["entries", b"entries", "nextPage", b"nextPage"]) -> None: ... - -global___ProtoQueryAttributesResultDTO = ProtoQueryAttributesResultDTO - -@typing_extensions.final -class ProtoNextPageDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - NEXTPAGETOKEN_FIELD_NUMBER: builtins.int - LIMIT_FIELD_NUMBER: builtins.int - nextPageToken: builtins.str - limit: builtins.int - def __init__( - self, - *, - nextPageToken: builtins.str | None = ..., - limit: builtins.int | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_limit", b"_limit", "_nextPageToken", b"_nextPageToken", "limit", b"limit", "nextPageToken", b"nextPageToken"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_limit", b"_limit", "_nextPageToken", b"_nextPageToken", "limit", b"limit", "nextPageToken", b"nextPageToken"]) -> None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_limit", b"_limit"]) -> typing_extensions.Literal["limit"] | None: ... 
- @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_nextPageToken", b"_nextPageToken"]) -> typing_extensions.Literal["nextPageToken"] | None: ... - -global___ProtoNextPageDTO = ProtoNextPageDTO - -@typing_extensions.final -class ProtoQueryAttributesExperimentResultDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - EXPERIMENTID_FIELD_NUMBER: builtins.int - EXPERIMENTSHORTID_FIELD_NUMBER: builtins.int - ATTRIBUTES_FIELD_NUMBER: builtins.int - experimentId: builtins.str - experimentShortId: builtins.str - @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2.ProtoAttributeDTO]: ... - def __init__( - self, - *, - experimentId: builtins.str = ..., - experimentShortId: builtins.str = ..., - attributes: collections.abc.Iterable[neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2.ProtoAttributeDTO] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "experimentId", b"experimentId", "experimentShortId", b"experimentShortId"]) -> None: ... - -global___ProtoQueryAttributesExperimentResultDTO = ProtoQueryAttributesExperimentResultDTO diff --git a/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.py b/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.py deleted file mode 100644 index 9b1608bcc..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: neptune_pb/api/model/leaderboard_entries.proto -# Protobuf Python Version: 4.25.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.neptune_pb/api/model/leaderboard_entries.proto\x12\x11neptune.api.model\"\xb3\x01\n&ProtoLeaderboardEntriesSearchResultDTO\x12\x1b\n\x13matching_item_count\x18\x01 \x01(\x03\x12\x1e\n\x11total_group_count\x18\x02 \x01(\x03H\x00\x88\x01\x01\x12\x36\n\x07\x65ntries\x18\x03 \x03(\x0b\x32%.neptune.api.model.ProtoAttributesDTOB\x14\n\x12_total_group_count\"\xd1\x01\n\x12ProtoAttributesDTO\x12\x15\n\rexperiment_id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x12\n\nproject_id\x18\x03 \x01(\t\x12\x17\n\x0forganization_id\x18\x04 \x01(\t\x12\x14\n\x0cproject_name\x18\x05 \x01(\t\x12\x19\n\x11organization_name\x18\x06 \x01(\t\x12\x38\n\nattributes\x18\x07 \x03(\x0b\x32$.neptune.api.model.ProtoAttributeDTO\"\xed\x05\n\x11ProtoAttributeDTO\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x44\n\x0eint_properties\x18\x03 \x01(\x0b\x32\'.neptune.api.model.ProtoIntAttributeDTOH\x00\x88\x01\x01\x12H\n\x10\x66loat_properties\x18\x04 \x01(\x0b\x32).neptune.api.model.ProtoFloatAttributeDTOH\x01\x88\x01\x01\x12J\n\x11string_properties\x18\x05 \x01(\x0b\x32*.neptune.api.model.ProtoStringAttributeDTOH\x02\x88\x01\x01\x12\x46\n\x0f\x62ool_properties\x18\x06 \x01(\x0b\x32(.neptune.api.model.ProtoBoolAttributeDTOH\x03\x88\x01\x01\x12N\n\x13\x64\x61tetime_properties\x18\x07 
\x01(\x0b\x32,.neptune.api.model.ProtoDatetimeAttributeDTOH\x04\x88\x01\x01\x12Q\n\x15string_set_properties\x18\x08 \x01(\x0b\x32-.neptune.api.model.ProtoStringSetAttributeDTOH\x05\x88\x01\x01\x12U\n\x17\x66loat_series_properties\x18\t \x01(\x0b\x32/.neptune.api.model.ProtoFloatSeriesAttributeDTOH\x06\x88\x01\x01\x42\x11\n\x0f_int_propertiesB\x13\n\x11_float_propertiesB\x14\n\x12_string_propertiesB\x12\n\x10_bool_propertiesB\x16\n\x14_datetime_propertiesB\x18\n\x16_string_set_propertiesB\x1a\n\x18_float_series_properties\"U\n\x14ProtoIntAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\x03\"W\n\x16ProtoFloatAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\x01\"X\n\x17ProtoStringAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"V\n\x15ProtoBoolAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\x08\"Z\n\x19ProtoDatetimeAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\x03\"[\n\x1aProtoStringSetAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x03(\t\"\xd1\x02\n\x1cProtoFloatSeriesAttributeDTO\x12\x16\n\x0e\x61ttribute_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_type\x18\x02 \x01(\t\x12\x16\n\tlast_step\x18\x03 \x01(\x01H\x00\x88\x01\x01\x12\x11\n\x04last\x18\x04 \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03min\x18\x05 \x01(\x01H\x02\x88\x01\x01\x12\x10\n\x03max\x18\x06 \x01(\x01H\x03\x88\x01\x01\x12\x14\n\x07\x61verage\x18\x07 \x01(\x01H\x04\x88\x01\x01\x12\x15\n\x08variance\x18\x08 \x01(\x01H\x05\x88\x01\x01\x12\x45\n\x06\x63onfig\x18\t \x01(\x0b\x32\x35.neptune.api.model.ProtoFloatSeriesAttributeConfigDTOB\x0c\n\n_last_stepB\x07\n\x05_lastB\x06\n\x04_minB\x06\n\x04_maxB\n\n\x08_averageB\x0b\n\t_variance\"t\n\"ProtoFloatSeriesAttributeConfigDTO\x12\x10\n\x03min\x18\x01 \x01(\x01H\x00\x88\x01\x01\x12\x10\n\x03max\x18\x02 \x01(\x01H\x01\x88\x01\x01\x12\x11\n\x04unit\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x06\n\x04_minB\x06\n\x04_maxB\x07\n\x05_unitB4\n0ml.neptune.leaderboard.api.model.proto.generatedP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'\n0ml.neptune.leaderboard.api.model.proto.generatedP\001' - _globals['_PROTOLEADERBOARDENTRIESSEARCHRESULTDTO']._serialized_start=70 - _globals['_PROTOLEADERBOARDENTRIESSEARCHRESULTDTO']._serialized_end=249 - _globals['_PROTOATTRIBUTESDTO']._serialized_start=252 - _globals['_PROTOATTRIBUTESDTO']._serialized_end=461 - _globals['_PROTOATTRIBUTEDTO']._serialized_start=464 - _globals['_PROTOATTRIBUTEDTO']._serialized_end=1213 - _globals['_PROTOINTATTRIBUTEDTO']._serialized_start=1215 - _globals['_PROTOINTATTRIBUTEDTO']._serialized_end=1300 - _globals['_PROTOFLOATATTRIBUTEDTO']._serialized_start=1302 - _globals['_PROTOFLOATATTRIBUTEDTO']._serialized_end=1389 - _globals['_PROTOSTRINGATTRIBUTEDTO']._serialized_start=1391 - 
_globals['_PROTOSTRINGATTRIBUTEDTO']._serialized_end=1479 - _globals['_PROTOBOOLATTRIBUTEDTO']._serialized_start=1481 - _globals['_PROTOBOOLATTRIBUTEDTO']._serialized_end=1567 - _globals['_PROTODATETIMEATTRIBUTEDTO']._serialized_start=1569 - _globals['_PROTODATETIMEATTRIBUTEDTO']._serialized_end=1659 - _globals['_PROTOSTRINGSETATTRIBUTEDTO']._serialized_start=1661 - _globals['_PROTOSTRINGSETATTRIBUTEDTO']._serialized_end=1752 - _globals['_PROTOFLOATSERIESATTRIBUTEDTO']._serialized_start=1755 - _globals['_PROTOFLOATSERIESATTRIBUTEDTO']._serialized_end=2092 - _globals['_PROTOFLOATSERIESATTRIBUTECONFIGDTO']._serialized_start=2094 - _globals['_PROTOFLOATSERIESATTRIBUTECONFIGDTO']._serialized_end=2210 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.pyi b/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.pyi deleted file mode 100644 index 7cd4e6ff9..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/leaderboard_entries_pb2.pyi +++ /dev/null @@ -1,345 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.message -import sys -import typing - -if sys.version_info >= (3, 8): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing_extensions.final -class ProtoLeaderboardEntriesSearchResultDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - MATCHING_ITEM_COUNT_FIELD_NUMBER: builtins.int - TOTAL_GROUP_COUNT_FIELD_NUMBER: builtins.int - ENTRIES_FIELD_NUMBER: builtins.int - matching_item_count: builtins.int - total_group_count: builtins.int - @property - def entries(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProtoAttributesDTO]: ... - def __init__( - self, - *, - matching_item_count: builtins.int = ..., - total_group_count: builtins.int | None = ..., - entries: collections.abc.Iterable[global___ProtoAttributesDTO] | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_total_group_count", b"_total_group_count", "total_group_count", b"total_group_count"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_total_group_count", b"_total_group_count", "entries", b"entries", "matching_item_count", b"matching_item_count", "total_group_count", b"total_group_count"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["_total_group_count", b"_total_group_count"]) -> typing_extensions.Literal["total_group_count"] | None: ... 
- -global___ProtoLeaderboardEntriesSearchResultDTO = ProtoLeaderboardEntriesSearchResultDTO - -@typing_extensions.final -class ProtoAttributesDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - EXPERIMENT_ID_FIELD_NUMBER: builtins.int - TYPE_FIELD_NUMBER: builtins.int - PROJECT_ID_FIELD_NUMBER: builtins.int - ORGANIZATION_ID_FIELD_NUMBER: builtins.int - PROJECT_NAME_FIELD_NUMBER: builtins.int - ORGANIZATION_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTES_FIELD_NUMBER: builtins.int - experiment_id: builtins.str - type: builtins.str - project_id: builtins.str - organization_id: builtins.str - project_name: builtins.str - organization_name: builtins.str - @property - def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProtoAttributeDTO]: ... - def __init__( - self, - *, - experiment_id: builtins.str = ..., - type: builtins.str = ..., - project_id: builtins.str = ..., - organization_id: builtins.str = ..., - project_name: builtins.str = ..., - organization_name: builtins.str = ..., - attributes: collections.abc.Iterable[global___ProtoAttributeDTO] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "experiment_id", b"experiment_id", "organization_id", b"organization_id", "organization_name", b"organization_name", "project_id", b"project_id", "project_name", b"project_name", "type", b"type"]) -> None: ... - -global___ProtoAttributesDTO = ProtoAttributesDTO - -@typing_extensions.final -class ProtoAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - NAME_FIELD_NUMBER: builtins.int - TYPE_FIELD_NUMBER: builtins.int - INT_PROPERTIES_FIELD_NUMBER: builtins.int - FLOAT_PROPERTIES_FIELD_NUMBER: builtins.int - STRING_PROPERTIES_FIELD_NUMBER: builtins.int - BOOL_PROPERTIES_FIELD_NUMBER: builtins.int - DATETIME_PROPERTIES_FIELD_NUMBER: builtins.int - STRING_SET_PROPERTIES_FIELD_NUMBER: builtins.int - FLOAT_SERIES_PROPERTIES_FIELD_NUMBER: builtins.int - name: builtins.str - type: builtins.str - @property - def int_properties(self) -> global___ProtoIntAttributeDTO: ... - @property - def float_properties(self) -> global___ProtoFloatAttributeDTO: ... - @property - def string_properties(self) -> global___ProtoStringAttributeDTO: ... - @property - def bool_properties(self) -> global___ProtoBoolAttributeDTO: ... - @property - def datetime_properties(self) -> global___ProtoDatetimeAttributeDTO: ... - @property - def string_set_properties(self) -> global___ProtoStringSetAttributeDTO: ... - @property - def float_series_properties(self) -> global___ProtoFloatSeriesAttributeDTO: ... - def __init__( - self, - *, - name: builtins.str = ..., - type: builtins.str = ..., - int_properties: global___ProtoIntAttributeDTO | None = ..., - float_properties: global___ProtoFloatAttributeDTO | None = ..., - string_properties: global___ProtoStringAttributeDTO | None = ..., - bool_properties: global___ProtoBoolAttributeDTO | None = ..., - datetime_properties: global___ProtoDatetimeAttributeDTO | None = ..., - string_set_properties: global___ProtoStringSetAttributeDTO | None = ..., - float_series_properties: global___ProtoFloatSeriesAttributeDTO | None = ..., - ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["_bool_properties", b"_bool_properties", "_datetime_properties", b"_datetime_properties", "_float_properties", b"_float_properties", "_float_series_properties", b"_float_series_properties", "_int_properties", b"_int_properties", "_string_properties", b"_string_properties", "_string_set_properties", b"_string_set_properties", "bool_properties", b"bool_properties", "datetime_properties", b"datetime_properties", "float_properties", b"float_properties", "float_series_properties", b"float_series_properties", "int_properties", b"int_properties", "string_properties", b"string_properties", "string_set_properties", b"string_set_properties"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_bool_properties", b"_bool_properties", "_datetime_properties", b"_datetime_properties", "_float_properties", b"_float_properties", "_float_series_properties", b"_float_series_properties", "_int_properties", b"_int_properties", "_string_properties", b"_string_properties", "_string_set_properties", b"_string_set_properties", "bool_properties", b"bool_properties", "datetime_properties", b"datetime_properties", "float_properties", b"float_properties", "float_series_properties", b"float_series_properties", "int_properties", b"int_properties", "name", b"name", "string_properties", b"string_properties", "string_set_properties", b"string_set_properties", "type", b"type"]) -> None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_bool_properties", b"_bool_properties"]) -> typing_extensions.Literal["bool_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_datetime_properties", b"_datetime_properties"]) -> typing_extensions.Literal["datetime_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_float_properties", b"_float_properties"]) -> typing_extensions.Literal["float_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_float_series_properties", b"_float_series_properties"]) -> typing_extensions.Literal["float_series_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_int_properties", b"_int_properties"]) -> typing_extensions.Literal["int_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_string_properties", b"_string_properties"]) -> typing_extensions.Literal["string_properties"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_string_set_properties", b"_string_set_properties"]) -> typing_extensions.Literal["string_set_properties"] | None: ... - -global___ProtoAttributeDTO = ProtoAttributeDTO - -@typing_extensions.final -class ProtoIntAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - value: builtins.int - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... 
- -global___ProtoIntAttributeDTO = ProtoIntAttributeDTO - -@typing_extensions.final -class ProtoFloatAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - value: builtins.float - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: builtins.float = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... - -global___ProtoFloatAttributeDTO = ProtoFloatAttributeDTO - -@typing_extensions.final -class ProtoStringAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - value: builtins.str - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: builtins.str = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... - -global___ProtoStringAttributeDTO = ProtoStringAttributeDTO - -@typing_extensions.final -class ProtoBoolAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - value: builtins.bool - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: builtins.bool = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... - -global___ProtoBoolAttributeDTO = ProtoBoolAttributeDTO - -@typing_extensions.final -class ProtoDatetimeAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - value: builtins.int - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... - -global___ProtoDatetimeAttributeDTO = ProtoDatetimeAttributeDTO - -@typing_extensions.final -class ProtoStringSetAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - @property - def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... 
- def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - value: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "value", b"value"]) -> None: ... - -global___ProtoStringSetAttributeDTO = ProtoStringSetAttributeDTO - -@typing_extensions.final -class ProtoFloatSeriesAttributeDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - ATTRIBUTE_NAME_FIELD_NUMBER: builtins.int - ATTRIBUTE_TYPE_FIELD_NUMBER: builtins.int - LAST_STEP_FIELD_NUMBER: builtins.int - LAST_FIELD_NUMBER: builtins.int - MIN_FIELD_NUMBER: builtins.int - MAX_FIELD_NUMBER: builtins.int - AVERAGE_FIELD_NUMBER: builtins.int - VARIANCE_FIELD_NUMBER: builtins.int - CONFIG_FIELD_NUMBER: builtins.int - attribute_name: builtins.str - attribute_type: builtins.str - last_step: builtins.float - last: builtins.float - min: builtins.float - max: builtins.float - average: builtins.float - variance: builtins.float - @property - def config(self) -> global___ProtoFloatSeriesAttributeConfigDTO: ... - def __init__( - self, - *, - attribute_name: builtins.str = ..., - attribute_type: builtins.str = ..., - last_step: builtins.float | None = ..., - last: builtins.float | None = ..., - min: builtins.float | None = ..., - max: builtins.float | None = ..., - average: builtins.float | None = ..., - variance: builtins.float | None = ..., - config: global___ProtoFloatSeriesAttributeConfigDTO | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_average", b"_average", "_last", b"_last", "_last_step", b"_last_step", "_max", b"_max", "_min", b"_min", "_variance", b"_variance", "average", b"average", "config", b"config", "last", b"last", "last_step", b"last_step", "max", b"max", "min", b"min", "variance", b"variance"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_average", b"_average", "_last", b"_last", "_last_step", b"_last_step", "_max", b"_max", "_min", b"_min", "_variance", b"_variance", "attribute_name", b"attribute_name", "attribute_type", b"attribute_type", "average", b"average", "config", b"config", "last", b"last", "last_step", b"last_step", "max", b"max", "min", b"min", "variance", b"variance"]) -> None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_average", b"_average"]) -> typing_extensions.Literal["average"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_last", b"_last"]) -> typing_extensions.Literal["last"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_last_step", b"_last_step"]) -> typing_extensions.Literal["last_step"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_max", b"_max"]) -> typing_extensions.Literal["max"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_min", b"_min"]) -> typing_extensions.Literal["min"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_variance", b"_variance"]) -> typing_extensions.Literal["variance"] | None: ... 
- -global___ProtoFloatSeriesAttributeDTO = ProtoFloatSeriesAttributeDTO - -@typing_extensions.final -class ProtoFloatSeriesAttributeConfigDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - MIN_FIELD_NUMBER: builtins.int - MAX_FIELD_NUMBER: builtins.int - UNIT_FIELD_NUMBER: builtins.int - min: builtins.float - max: builtins.float - unit: builtins.str - def __init__( - self, - *, - min: builtins.float | None = ..., - max: builtins.float | None = ..., - unit: builtins.str | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_unit", b"_unit", "max", b"max", "min", b"min", "unit", b"unit"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_max", b"_max", "_min", b"_min", "_unit", b"_unit", "max", b"max", "min", b"min", "unit", b"unit"]) -> None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_max", b"_max"]) -> typing_extensions.Literal["max"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_min", b"_min"]) -> typing_extensions.Literal["min"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_unit", b"_unit"]) -> typing_extensions.Literal["unit"] | None: ... - -global___ProtoFloatSeriesAttributeConfigDTO = ProtoFloatSeriesAttributeConfigDTO diff --git a/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.py b/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.py deleted file mode 100644 index 78867b352..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: neptune_pb/api/model/series_values.proto -# Protobuf Python Version: 4.25.1 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(neptune_pb/api/model/series_values.proto\x12\x11neptune.api.model\"q\n\x19ProtoFloatSeriesValuesDTO\x12\x18\n\x10total_item_count\x18\x01 \x01(\x03\x12:\n\x06values\x18\x02 \x03(\x0b\x32*.neptune.api.model.ProtoFloatPointValueDTO\"P\n\x17ProtoFloatPointValueDTO\x12\x18\n\x10timestamp_millis\x18\x01 \x01(\x03\x12\x0c\n\x04step\x18\x02 \x01(\x01\x12\r\n\x05value\x18\x03 \x01(\x01\x42\x34\n0ml.neptune.leaderboard.api.model.proto.generatedP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'neptune.api.proto.neptune_pb.api.model.series_values_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'\n0ml.neptune.leaderboard.api.model.proto.generatedP\001' - _globals['_PROTOFLOATSERIESVALUESDTO']._serialized_start=63 - _globals['_PROTOFLOATSERIESVALUESDTO']._serialized_end=176 - _globals['_PROTOFLOATPOINTVALUEDTO']._serialized_start=178 - _globals['_PROTOFLOATPOINTVALUEDTO']._serialized_end=258 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.pyi b/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.pyi deleted file mode 100644 index 9cb329797..000000000 --- a/src/neptune/api/proto/neptune_pb/api/model/series_values_pb2.pyi +++ /dev/null @@ -1,57 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.message -import sys - -if sys.version_info >= (3, 8): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing_extensions.final -class ProtoFloatSeriesValuesDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - TOTAL_ITEM_COUNT_FIELD_NUMBER: builtins.int - VALUES_FIELD_NUMBER: builtins.int - total_item_count: builtins.int - @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProtoFloatPointValueDTO]: ... - def __init__( - self, - *, - total_item_count: builtins.int = ..., - values: collections.abc.Iterable[global___ProtoFloatPointValueDTO] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["total_item_count", b"total_item_count", "values", b"values"]) -> None: ... 
- -global___ProtoFloatSeriesValuesDTO = ProtoFloatSeriesValuesDTO - -@typing_extensions.final -class ProtoFloatPointValueDTO(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - TIMESTAMP_MILLIS_FIELD_NUMBER: builtins.int - STEP_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - timestamp_millis: builtins.int - step: builtins.float - value: builtins.float - def __init__( - self, - *, - timestamp_millis: builtins.int = ..., - step: builtins.float = ..., - value: builtins.float = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["step", b"step", "timestamp_millis", b"timestamp_millis", "value", b"value"]) -> None: ... - -global___ProtoFloatPointValueDTO = ProtoFloatPointValueDTO diff --git a/src/neptune/api/requests_utils.py b/src/neptune/api/requests_utils.py deleted file mode 100644 index 3fe79a300..000000000 --- a/src/neptune/api/requests_utils.py +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ensure_json_response"] - -from typing import ( - TYPE_CHECKING, - Any, - Dict, -) - -from simplejson.errors import JSONDecodeError - -if TYPE_CHECKING: - from bravado_core.response import IncomingResponse - - -def ensure_json_response(response: "IncomingResponse") -> Dict[str, Any]: - try: - return response.json() or dict() - except JSONDecodeError: - return dict() diff --git a/src/neptune/api/searching_entries.py b/src/neptune/api/searching_entries.py deleted file mode 100644 index ac4fc1651..000000000 --- a/src/neptune/api/searching_entries.py +++ /dev/null @@ -1,248 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["get_single_page", "iter_over_pages", "find_attribute"] - -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generator, - Iterable, - Optional, -) - -from bravado.exception import HTTPBadRequest # type: ignore -from typing_extensions import ( - Literal, - TypeAlias, -) - -from neptune.api.field_visitor import FieldToValueVisitor -from neptune.api.models import ( - Field, - FieldType, - LeaderboardEntriesSearchResult, - LeaderboardEntry, -) -from neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2 import ProtoLeaderboardEntriesSearchResultDTO -from neptune.exceptions import NeptuneInvalidQueryException -from neptune.internal.backends.hosted_client import ( - DEFAULT_PROTO_REQUEST_KWARGS, - DEFAULT_REQUEST_KWARGS, -) -from neptune.internal.backends.nql import ( - NQLAggregator, - NQLAttributeOperator, - NQLAttributeType, - NQLEmptyQuery, - NQLQuery, - NQLQueryAggregate, - NQLQueryAttribute, -) -from neptune.internal.backends.utils import construct_progress_bar -from neptune.internal.init.parameters import MAX_SERVER_OFFSET -from neptune.typing import ProgressBarType - -if TYPE_CHECKING: - from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper - from neptune.internal.id_formats import UniqueId - - -SUPPORTED_ATTRIBUTE_TYPES = {item.value for item in FieldType} - -SORT_BY_COLUMN_TYPE: TypeAlias = Literal["string", "datetime", "integer", "boolean", "float"] - - -class NoLimit(int): - def __gt__(self, other: Any) -> bool: - return True - - def __lt__(self, other: Any) -> bool: - return False - - def __ge__(self, other: Any) -> bool: - return True - - def __le__(self, other: Any) -> bool: - return False - - def __eq__(self, other: Any) -> bool: - return False - - def __ne__(self, other: Any) -> bool: - return True - - -def get_single_page( - *, - client: "SwaggerClientWrapper", - project_id: "UniqueId", - attributes_filter: Dict[str, Any], - limit: int, - offset: int, - sort_by: str, - sort_by_column_type: SORT_BY_COLUMN_TYPE, - ascending: bool, - types: Optional[Iterable[str]], - query: Optional["NQLQuery"], - searching_after: Optional[str], - use_proto: Optional[bool] = None, -) -> LeaderboardEntriesSearchResult: - normalized_query = query or NQLEmptyQuery() - sort_by_column_type = sort_by_column_type if sort_by_column_type else FieldType.STRING.value - if sort_by and searching_after: - sort_by_as_nql = NQLQueryAttribute( - name=sort_by, - type=NQLAttributeType(sort_by_column_type), - operator=NQLAttributeOperator.GREATER_THAN if ascending else NQLAttributeOperator.LESS_THAN, - value=searching_after, - ) - - if not isinstance(normalized_query, NQLEmptyQuery): - normalized_query = NQLQueryAggregate(items=[normalized_query, sort_by_as_nql], aggregator=NQLAggregator.AND) - else: - normalized_query = sort_by_as_nql - - sorting = ( - { - "sorting": { - "dir": "ascending" if ascending else "descending", - "aggregationMode": "none", - "sortBy": { - "name": sort_by, - "type": sort_by_column_type if sort_by_column_type else FieldType.STRING.value, - }, - } - } - if sort_by - else {} - ) - - params = { - "projectIdentifier": project_id, - "type": types, - "params": { - **sorting, - **attributes_filter, - "query": {"query": str(normalized_query)}, - "pagination": {"limit": limit, "offset": offset}, - }, - } - - try: - if use_proto: - result = ( - client.api.searchLeaderboardEntriesProto(**params, **DEFAULT_PROTO_REQUEST_KWARGS).response().result - ) - proto_data = ProtoLeaderboardEntriesSearchResultDTO.FromString(result) - return 
LeaderboardEntriesSearchResult.from_proto(proto_data) - else: - model_data = client.api.searchLeaderboardEntries(**params, **DEFAULT_REQUEST_KWARGS).response().result - return LeaderboardEntriesSearchResult.from_model(model_data) - except HTTPBadRequest as e: - title = e.response.json().get("title") - if title == "Syntax error": - raise NeptuneInvalidQueryException(nql_query=str(normalized_query)) - raise e - - -def find_attribute(*, entry: LeaderboardEntry, path: str) -> Optional[Field]: - return next((attr for attr in entry.fields if attr.path == path), None) - - -def iter_over_pages( - *, - step_size: int, - limit: Optional[int], - sort_by: str, - sort_by_column_type: SORT_BY_COLUMN_TYPE, - ascending: bool, - progress_bar: Optional[ProgressBarType], - max_offset: int = MAX_SERVER_OFFSET, - **kwargs: Any, -) -> Generator[Any, None, None]: - searching_after = None - last_page = None - - data = get_single_page( - limit=0, - offset=0, - sort_by=sort_by, - ascending=ascending, - sort_by_column_type=sort_by_column_type, - searching_after=None, - **kwargs, - ) - total = data.matching_item_count - - limit = limit if limit is not None else NoLimit() - - total = total if total < limit else limit - - progress_bar = False if total <= step_size else progress_bar # disable progress bar if only one page is fetched - - extracted_records = 0 - - field_to_value_visitor = FieldToValueVisitor() - - with construct_progress_bar(progress_bar, "Fetching table...") as bar: - # beginning of the first page - bar.update( - by=0, - total=total, - ) - - while True: - if last_page: - searching_after_field = find_attribute(entry=last_page[-1], path=sort_by) - if not searching_after_field: - raise ValueError(f"Cannot find attribute {sort_by} in last page") - searching_after = field_to_value_visitor.visit(searching_after_field) - - for offset in range(0, max_offset, step_size): - local_limit = min(step_size, max_offset - offset) - if extracted_records + local_limit > limit: - local_limit = limit - extracted_records - - result = get_single_page( - limit=local_limit, - offset=offset, - sort_by=sort_by, - sort_by_column_type=sort_by_column_type, - searching_after=searching_after, - ascending=ascending, - **kwargs, - ) - - # fetch the item count every time a new page is started (except for the very first page) - if offset == 0 and last_page is not None: - total += result.matching_item_count - - total = min(total, limit) - - page = result.entries - extracted_records += len(page) - bar.update(by=len(page), total=total) - - if not page: - return - - yield from page - - if extracted_records == limit: - return - - last_page = page diff --git a/src/neptune/attributes/__init__.py b/src/neptune/attributes/__init__.py deleted file mode 100644 index 0b4157bf7..000000000 --- a/src/neptune/attributes/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# - -__all__ = [ - "Artifact", - "Boolean", - "Datetime", - "File", - "Float", - "GitRef", - "Integer", - "NotebookRef", - "RunState", - "String", - "FileSet", - "FileSeries", - "FloatSeries", - "StringSeries", - "StringSet", - "create_attribute_from_type", -] - - -from .atoms import ( - Artifact, - Boolean, - Datetime, - File, - Float, - GitRef, - Integer, - NotebookRef, - RunState, - String, -) -from .file_set import FileSet -from .series import ( - FileSeries, - FloatSeries, - StringSeries, -) -from .sets import StringSet -from .utils import create_attribute_from_type diff --git a/src/neptune/attributes/atoms/__init__.py b/src/neptune/attributes/atoms/__init__.py deleted file mode 100644 index fe5030d47..000000000 --- a/src/neptune/attributes/atoms/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Artifact", "Boolean", "Datetime", "File", "Float", "GitRef", "Integer", "NotebookRef", "RunState", "String"] - -from .artifact import Artifact -from .boolean import Boolean -from .datetime import Datetime -from .file import File -from .float import Float -from .git_ref import GitRef -from .integer import Integer -from .notebook_ref import NotebookRef -from .run_state import RunState -from .string import String diff --git a/src/neptune/attributes/atoms/artifact.py b/src/neptune/attributes/atoms/artifact.py deleted file mode 100644 index 1d9145ccb..000000000 --- a/src/neptune/attributes/atoms/artifact.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Artifact"] - -import pathlib -import typing - -from neptune.attributes.atoms.atom import Atom -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactDriversMap, - ArtifactFileData, -) -from neptune.internal.backends.api_model import OptionalFeatures -from neptune.internal.operation import ( - AssignArtifact, - TrackFilesToArtifact, -) -from neptune.types.atoms.artifact import Artifact as ArtifactVal -from neptune.typing import ProgressBarType - - -class Artifact(Atom): - def _check_feature(self): - self._container._backend.verify_feature_available(OptionalFeatures.ARTIFACTS) - - def assign(self, value: ArtifactVal, *, wait: bool = False): - self._check_feature() - # this function should be used only with ArtifactVal - if not isinstance(value, ArtifactVal): - raise TypeError("Value of unsupported type {}".format(type(value))) - - with self._container.lock(): - self._enqueue_operation(AssignArtifact(self._path, value.hash), wait=wait) - - def fetch(self) -> ArtifactVal: - self._check_feature() - return ArtifactVal(self.fetch_hash()) - - def fetch_hash(self) -> str: - self._check_feature() - val = self._backend.get_artifact_attribute(self._container_id, self._container_type, self._path) - return val.hash - - def fetch_files_list(self) -> typing.List[ArtifactFileData]: - self._check_feature() - artifact_hash = self.fetch_hash() - return self._backend.list_artifact_files( - self._container._project_id, - artifact_hash, - ) - - def download(self, destination: str = None, progress_bar: typing.Optional[ProgressBarType] = None): - self._check_feature() - for file_definition in self.fetch_files_list(): - driver: typing.Type[ArtifactDriver] = ArtifactDriversMap.match_type(file_definition.type) - file_destination = pathlib.Path(destination or ".") / pathlib.Path(file_definition.file_path) - file_destination.parent.mkdir(parents=True, exist_ok=True) - driver.download_file(file_destination, file_definition) - - def track_files(self, path: str, *, destination: str = None, wait: bool = False): - self._check_feature() - with self._container.lock(): - self._enqueue_operation( - TrackFilesToArtifact( - self._path, - self._container._project_id, - [(path, destination)], - ), - wait=wait, - ) diff --git a/src/neptune/attributes/atoms/atom.py b/src/neptune/attributes/atoms/atom.py deleted file mode 100644 index f5ae4256e..000000000 --- a/src/neptune/attributes/atoms/atom.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Atom"] - -from neptune.attributes.attribute import Attribute - - -class Atom(Attribute): - pass diff --git a/src/neptune/attributes/atoms/boolean.py b/src/neptune/attributes/atoms/boolean.py deleted file mode 100644 index df11c26d8..000000000 --- a/src/neptune/attributes/atoms/boolean.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Boolean"] - -import typing - -from neptune.attributes.atoms.copiable_atom import CopiableAtom -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import AssignBool -from neptune.types.atoms.boolean import Boolean as BooleanVal - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -class Boolean(CopiableAtom): - @staticmethod - def create_assignment_operation(path, value: bool): - return AssignBool(path, value) - - @staticmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> bool: - val = backend.get_bool_attribute(container_id, container_type, path) - return val.value - - def assign(self, value: typing.Union[BooleanVal, bool], *, wait: bool = False): - if not isinstance(value, BooleanVal): - value = BooleanVal(value) - - with self._container.lock(): - self._enqueue_operation(self.create_assignment_operation(self._path, value.value), wait=wait) diff --git a/src/neptune/attributes/atoms/copiable_atom.py b/src/neptune/attributes/atoms/copiable_atom.py deleted file mode 100644 index b1742e3d0..000000000 --- a/src/neptune/attributes/atoms/copiable_atom.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["CopiableAtom"] - -import abc -import typing - -from neptune.attributes.atoms.atom import Atom -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import CopyAttribute -from neptune.internal.utils.paths import parse_path -from neptune.types.value_copy import ValueCopy - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -class CopiableAtom(Atom): - supports_copy = True - - def copy(self, value: ValueCopy, *, wait: bool = False): - with self._container.lock(): - source_path = value.source_handler._path - source_attr = value.source_handler._get_attribute() - self._enqueue_operation( - CopyAttribute( - self._path, - container_id=source_attr._container_id, - container_type=source_attr._container_type, - source_path=parse_path(source_path), - source_attr_cls=source_attr.__class__, - ), - wait=wait, - ) - - @staticmethod - @abc.abstractmethod - def create_assignment_operation(path, value: int): ... 
- - @staticmethod - @abc.abstractmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> int: ... - - def fetch(self): - return self.getter(self._backend, self._container_id, self._container_type, self._path) diff --git a/src/neptune/attributes/atoms/datetime.py b/src/neptune/attributes/atoms/datetime.py deleted file mode 100644 index 87fe0afa6..000000000 --- a/src/neptune/attributes/atoms/datetime.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Datetime"] - -import typing -from datetime import datetime - -from neptune.attributes.atoms.copiable_atom import CopiableAtom -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import AssignDatetime -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import verify_type -from neptune.types.atoms.datetime import Datetime as DatetimeVal - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -class Datetime(CopiableAtom): - @staticmethod - def create_assignment_operation(path, value: datetime): - return AssignDatetime(path, value) - - @staticmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> datetime: - val = backend.get_datetime_attribute(container_id, container_type, path) - return val.value - - def assign(self, value: typing.Union[DatetimeVal, datetime, StringifyValue], *, wait: bool = False): - verify_type("value", value, (DatetimeVal, datetime, StringifyValue)) - if isinstance(value, (DatetimeVal, StringifyValue)): - value = value.value - else: - value = value.replace(microsecond=1000 * int(value.microsecond / 1000)) - with self._container.lock(): - self._enqueue_operation(self.create_assignment_operation(self._path, value), wait=wait) diff --git a/src/neptune/attributes/atoms/file.py b/src/neptune/attributes/atoms/file.py deleted file mode 100644 index 4c97559ab..000000000 --- a/src/neptune/attributes/atoms/file.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["File"] - -from typing import Optional - -from neptune.attributes.atoms.atom import Atom -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import UploadFile -from neptune.internal.utils import verify_type -from neptune.types.atoms.file import File as FileVal -from neptune.typing import ProgressBarType - - -class File(Atom): - def assign(self, value: FileVal, *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - verify_type("value", value, FileVal) - - operation = UploadFile.of_file( - value=value, - attribute_path=self._path, - operation_storage=self._container._op_processor.operation_storage, - ) - - with self._container.lock(): - self._enqueue_operation(operation, wait=wait) - - def upload(self, value, *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - self.assign(FileVal.create_from(value), wait=wait) - - def download( - self, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ) -> None: - raise NeptuneUnsupportedFunctionalityException - verify_type("destination", destination, (str, type(None))) - self._backend.download_file(self._container_id, self._container_type, self._path, destination, progress_bar) - - def fetch_extension(self) -> str: - raise NeptuneUnsupportedFunctionalityException - val = self._backend.get_file_attribute(self._container_id, self._container_type, self._path) - return val.ext diff --git a/src/neptune/attributes/atoms/float.py b/src/neptune/attributes/atoms/float.py deleted file mode 100644 index c401e0e98..000000000 --- a/src/neptune/attributes/atoms/float.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Float"] - -import typing - -from neptune.attributes.atoms.copiable_atom import CopiableAtom -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import AssignFloat -from neptune.internal.types.utils import is_unsupported_float -from neptune.internal.warnings import ( - NeptuneUnsupportedValue, - warn_once, -) -from neptune.types.atoms.float import Float as FloatVal - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -class Float(CopiableAtom): - @staticmethod - def create_assignment_operation(path, value: float): - return AssignFloat(path, value) - - @staticmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> float: - val = backend.get_float_attribute(container_id, container_type, path) - return val.value - - def assign(self, value: typing.Union[FloatVal, float, int], *, wait: bool = False): - if not isinstance(value, FloatVal): - value = FloatVal(value) - - if is_unsupported_float(value.value): - warn_once( - message=f"WARNING: The value you're trying to log is a nonstandard float value ({str(value.value)}) " - f"that is not currently supported. " - f"We'll add support for this type of value in the future. " - f"For now, you can use utils.stringify_unsupported() to log one or more values as strings: " - f"run['field'] = stringify_unsupported(float({str(value.value)}))", - exception=NeptuneUnsupportedValue, - ) - return - - with self._container.lock(): - self._enqueue_operation(self.create_assignment_operation(self._path, value.value), wait=wait) diff --git a/src/neptune/attributes/atoms/git_ref.py b/src/neptune/attributes/atoms/git_ref.py deleted file mode 100644 index 4b1fe892f..000000000 --- a/src/neptune/attributes/atoms/git_ref.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["GitRef"] - -from neptune.attributes.atoms.atom import Atom - - -class GitRef(Atom): - pass diff --git a/src/neptune/attributes/atoms/integer.py b/src/neptune/attributes/atoms/integer.py deleted file mode 100644 index 000e2fc1a..000000000 --- a/src/neptune/attributes/atoms/integer.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Integer"] - -import typing - -from neptune.attributes.atoms.copiable_atom import CopiableAtom -from neptune.constants import ( - MAX_32_BIT_INT, - MIN_32_BIT_INT, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import AssignInt -from neptune.internal.utils.logger import get_logger -from neptune.types.atoms.integer import Integer as IntegerVal - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - -logger = get_logger() - - -class Integer(CopiableAtom): - @staticmethod - def create_assignment_operation(path, value: int): - return AssignInt(path, value) - - @staticmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> int: - val = backend.get_int_attribute(container_id, container_type, path) - return val.value - - def assign(self, value: typing.Union[IntegerVal, float, int], *, wait: bool = False): - if not isinstance(value, IntegerVal): - value = IntegerVal(value) - - if value.value < MIN_32_BIT_INT or value.value > MAX_32_BIT_INT: - logger.warning( - "WARNING: The value you're trying to log is outside the range of 32-bit integers " - "(%s to %s) and will be skipped. " - "We'll support 64-bit integers in the future. " - 'For now, try logging the value as a float instead: run["field"] = float(%s)', - MIN_32_BIT_INT, - MAX_32_BIT_INT, - value.value, - ) - return - - with self._container.lock(): - self._enqueue_operation(self.create_assignment_operation(self._path, value.value), wait=wait) diff --git a/src/neptune/attributes/atoms/notebook_ref.py b/src/neptune/attributes/atoms/notebook_ref.py deleted file mode 100644 index fe1044e18..000000000 --- a/src/neptune/attributes/atoms/notebook_ref.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NotebookRef"] - - -from neptune.attributes.atoms.atom import Atom - - -class NotebookRef(Atom): - pass diff --git a/src/neptune/attributes/atoms/run_state.py b/src/neptune/attributes/atoms/run_state.py deleted file mode 100644 index 20bb761ce..000000000 --- a/src/neptune/attributes/atoms/run_state.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["RunState"] - -from neptune.attributes.atoms.atom import Atom - - -class RunState(Atom): - pass diff --git a/src/neptune/attributes/atoms/string.py b/src/neptune/attributes/atoms/string.py deleted file mode 100644 index 21bb5eeeb..000000000 --- a/src/neptune/attributes/atoms/string.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["String"] - -import typing - -from neptune.attributes.atoms.copiable_atom import CopiableAtom -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import AssignString -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import path_to_str -from neptune.types.atoms.string import String as StringVal - -if typing.TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - from neptune.objects import NeptuneObject - -logger = get_logger() - - -class String(CopiableAtom): - - MAX_VALUE_LENGTH = 16384 - - def __init__(self, container: "NeptuneObject", path: typing.List[str]): - super().__init__(container, path) - self._value_truncation_occurred = False - - @staticmethod - def create_assignment_operation(path, value: str): - return AssignString(path, value) - - @staticmethod - def getter( - backend: "NeptuneBackend", - container_id: str, - container_type: ContainerType, - path: typing.List[str], - ) -> str: - val = backend.get_string_attribute(container_id, container_type, path) - return val.value - - def assign(self, value: typing.Union[StringVal, str], *, wait: bool = False): - if not isinstance(value, StringVal): - value = StringVal(value) - - if len(value.value) > String.MAX_VALUE_LENGTH: - value.value = value.value[: String.MAX_VALUE_LENGTH] - - if not self._value_truncation_occurred: - # the first truncation - self._value_truncation_occurred = True - logger.warning( - "Warning: string '%s' value was" - " longer than %s characters and was truncated." - " This warning is printed only once.", - path_to_str(self._path), - String.MAX_VALUE_LENGTH, - ) - - with self._container.lock(): - self._enqueue_operation(self.create_assignment_operation(self._path, value.value), wait=wait) diff --git a/src/neptune/attributes/attribute.py b/src/neptune/attributes/attribute.py deleted file mode 100644 index c39d65b2a..000000000 --- a/src/neptune/attributes/attribute.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Attribute"] - -from typing import ( - TYPE_CHECKING, - List, -) - -from neptune.exceptions import TypeDoesNotSupportAttributeException -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.operation import Operation -from neptune.types.value_copy import ValueCopy - -if TYPE_CHECKING: - from neptune.internal.container_type import ContainerType - from neptune.objects import NeptuneObject - - -class Attribute: - supports_copy = False - - def __init__(self, container: "NeptuneObject", path: List[str]): - super().__init__() - self._container = container - self._path = path - - def __getattr__(self, attr): - raise TypeDoesNotSupportAttributeException(type_=type(self), attribute=attr) - - def _enqueue_operation(self, operation: Operation, *, wait: bool): - self._container._op_processor.enqueue_operation(operation, wait=wait) - - @property - def _backend(self) -> NeptuneBackend: - return self._container._backend - - @property - def _container_id(self) -> str: - return self._container._id - - @property - def _container_type(self) -> "ContainerType": - return self._container.container_type - - def copy(self, value: ValueCopy, wait: bool = False): - raise Exception(f"{type(self).__name__} doesn't support copying") - - def process_assignment(self, value, wait=False): - if isinstance(value, ValueCopy): - return self.copy(value, wait=wait) - else: - return self.assign(value, wait=wait) diff --git a/src/neptune/attributes/constants.py b/src/neptune/attributes/constants.py deleted file mode 100644 index cdb26fa20..000000000 --- a/src/neptune/attributes/constants.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = [ - "ARTIFACT_ATTRIBUTE_SPACE", - "DIFF_HEAD_INDEX_PATH", - "LOG_ATTRIBUTE_SPACE", - "MONITORING_ATTRIBUTE_SPACE", - "MONITORING_STDERR_ATTRIBUTE_PATH", - "MONITORING_STDOUT_ATTRIBUTE_PATH", - "MONITORING_TRACEBACK_ATTRIBUTE_PATH", - "PARAMETERS_ATTRIBUTE_SPACE", - "PROPERTIES_ATTRIBUTE_SPACE", - "SOURCE_CODE_ATTRIBUTE_SPACE", - "SOURCE_CODE_ENTRYPOINT_ATTRIBUTE_PATH", - "SOURCE_CODE_FILES_ATTRIBUTE_PATH", - "SYSTEM_ATTRIBUTE_SPACE", - "SYSTEM_DESCRIPTION_ATTRIBUTE_PATH", - "SYSTEM_HOSTNAME_ATTRIBUTE_PATH", - "SYSTEM_NAME_ATTRIBUTE_PATH", - "SYSTEM_STATE_ATTRIBUTE_PATH", - "SYSTEM_TAGS_ATTRIBUTE_PATH", - "SYSTEM_FAILED_ATTRIBUTE_PATH", - "SYSTEM_STAGE_ATTRIBUTE_PATH", - "SIGNAL_TYPE_STOP", - "SIGNAL_TYPE_ABORT", - "UPSTREAM_INDEX_DIFF", -] - -ARTIFACT_ATTRIBUTE_SPACE = "artifacts/" - -LOG_ATTRIBUTE_SPACE = "logs/" - -MONITORING_ATTRIBUTE_SPACE = "monitoring/" -MONITORING_STDERR_ATTRIBUTE_PATH = f"{MONITORING_ATTRIBUTE_SPACE}stderr" -MONITORING_STDOUT_ATTRIBUTE_PATH = f"{MONITORING_ATTRIBUTE_SPACE}stdout" -MONITORING_TRACEBACK_ATTRIBUTE_PATH = f"{MONITORING_ATTRIBUTE_SPACE}traceback" - -PARAMETERS_ATTRIBUTE_SPACE = "parameters/" - -PROPERTIES_ATTRIBUTE_SPACE = "properties/" - -SOURCE_CODE_ATTRIBUTE_SPACE = "source_code/" -SOURCE_CODE_ENTRYPOINT_ATTRIBUTE_PATH = f"{SOURCE_CODE_ATTRIBUTE_SPACE}entrypoint" -SOURCE_CODE_FILES_ATTRIBUTE_PATH = f"{SOURCE_CODE_ATTRIBUTE_SPACE}files" - -SYSTEM_ATTRIBUTE_SPACE = "sys/" -SYSTEM_DESCRIPTION_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}description" -SYSTEM_HOSTNAME_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}hostname" -SYSTEM_NAME_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}name" -SYSTEM_STATE_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}state" -SYSTEM_TAGS_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}tags" -SYSTEM_FAILED_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}failed" -SYSTEM_STAGE_ATTRIBUTE_PATH = f"{SYSTEM_ATTRIBUTE_SPACE}stage" - -SIGNAL_TYPE_STOP = "neptune/stop" -SIGNAL_TYPE_ABORT = "neptune/abort" - -DIFF_HEAD_INDEX_PATH = f"{SOURCE_CODE_ATTRIBUTE_SPACE}diff" -UPSTREAM_INDEX_DIFF = f"{SOURCE_CODE_ATTRIBUTE_SPACE}upstream_diff_" diff --git a/src/neptune/attributes/file_set.py b/src/neptune/attributes/file_set.py deleted file mode 100644 index 6876def69..000000000 --- a/src/neptune/attributes/file_set.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["FileSet"] - -import os -from typing import ( - Iterable, - List, - Optional, - Union, -) - -from neptune.api.models import FileEntry -from neptune.attributes.attribute import Attribute -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - DeleteFiles, - UploadFileSet, -) -from neptune.internal.utils import ( - verify_collection_type, - verify_type, -) -from neptune.types.file_set import FileSet as FileSetVal -from neptune.typing import ProgressBarType - - -class FileSet(Attribute): - def assign(self, value: Union[FileSetVal, str, Iterable[str]], *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - verify_type("value", value, (FileSetVal, str, Iterable)) - if isinstance(value, FileSetVal): - value = value.file_globs - elif isinstance(value, str): - value = [value] - else: - verify_collection_type("value", value, str) - self._enqueue_upload_operation(value, reset=True, wait=wait) - - def upload_files(self, globs: Union[str, Iterable[str]], *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - if isinstance(globs, str): - globs = [globs] - else: - verify_collection_type("globs", globs, str) - self._enqueue_upload_operation(globs, reset=False, wait=wait) - - def delete_files(self, paths: Union[str, Iterable[str]], *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - if isinstance(paths, str): - paths = [paths] - else: - verify_collection_type("paths", paths, str) - with self._container.lock(): - self._enqueue_operation(DeleteFiles(self._path, set(paths)), wait=wait) - - def _enqueue_upload_operation(self, globs: Iterable[str], *, reset: bool, wait: bool): - raise NeptuneUnsupportedFunctionalityException - with self._container.lock(): - abs_file_globs = list(os.path.abspath(file_glob) for file_glob in globs) - self._enqueue_operation(UploadFileSet(self._path, abs_file_globs, reset=reset), wait=wait) - - def download( - self, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ) -> None: - raise NeptuneUnsupportedFunctionalityException - verify_type("destination", destination, (str, type(None))) - self._backend.download_file_set(self._container_id, self._container_type, self._path, destination, progress_bar) - - def list_fileset_files(self, path: Optional[str] = None) -> List[FileEntry]: - raise NeptuneUnsupportedFunctionalityException - path = path or "" - return self._backend.list_fileset_files(self._path, self._container_id, path) diff --git a/src/neptune/attributes/namespace.py b/src/neptune/attributes/namespace.py deleted file mode 100644 index b336b297d..000000000 --- a/src/neptune/attributes/namespace.py +++ /dev/null @@ -1,137 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Namespace", "NamespaceBuilder"] - -import argparse -from collections.abc import MutableMapping -from typing import ( - TYPE_CHECKING, - Any, - Collection, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Union, -) - -from neptune.attributes.attribute import Attribute -from neptune.internal.container_structure import ContainerStructure -from neptune.internal.utils.generic_attribute_mapper import ( - NoValue, - atomic_attribute_types_map, -) -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import ( - parse_path, - path_to_str, -) -from neptune.types.namespace import Namespace as NamespaceVal - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - -logger = get_logger() -RunStructure = ContainerStructure # backwards compatibility - - -class Namespace(Attribute, MutableMapping): - def __init__(self, container: "NeptuneObject", path: List[str]): - Attribute.__init__(self, container, path) - self._attributes = {} - self._str_path = path_to_str(path) - - def __setitem__(self, k: str, v: Attribute) -> None: - if not parse_path(k): - logger.warning( - f'Key "{k}" can\'t be used in Namespaces and dicts stored in Neptune. Please use a non-empty key ' - f"instead. The value {v!r} will be dropped.", - ) - return - self._attributes[k] = v - - def __delitem__(self, k: str) -> None: - del self._attributes[k] - - def __getitem__(self, k: str) -> Attribute: - return self._attributes[k] - - def __len__(self) -> int: - return len(self._attributes) - - def __iter__(self) -> Iterator[str]: - yield from self._attributes.__iter__() - - def extend( - self, - value: Union[Any, Iterable[Any]], - *, - steps: Optional[Collection[float]] = None, - timestamps: Optional[Collection[float]] = None, - wait: bool = False, - **kwargs, - ) -> None: - if not isinstance(value, NamespaceVal): - value = NamespaceVal(value) - for k, v in value.value.items(): - self._container[f"{self._str_path}/{k}"].extend(v, steps=steps, timestamps=timestamps, wait=wait, **kwargs) - - def to_dict(self) -> Dict[str, Any]: - result = {} - for key, value in self._attributes.items(): - if isinstance(value, Namespace): - result[key] = value.to_dict() - else: - result[key] = value - return result - - def assign(self, value: Union[NamespaceVal, dict, Mapping], *, wait: bool = False): - if isinstance(value, argparse.Namespace): - value = NamespaceVal(vars(value)) - elif not isinstance(value, NamespaceVal): - value = NamespaceVal(value) - - for k, v in value.value.items(): - self._container[f"{self._str_path}/{k}"].assign(v, wait=wait) - - def _collect_atom_values(self, attribute_dict) -> dict: - result = {} - for k, v in attribute_dict.items(): - if isinstance(v, dict): - result[k] = self._collect_atom_values(v) - else: - attr_type, attr_value = v - if attr_type in atomic_attribute_types_map and attr_value is not NoValue: - result[k] = v[1] - return result - - def fetch(self) -> dict: - attributes = self._backend.fetch_atom_attribute_values(self._container_id, self._container_type, self._path) - run_struct = ContainerStructure() - prefix_len = len(self._path) - for attr_name, attr_type, attr_value in attributes: - run_struct.set(parse_path(attr_name)[prefix_len:], (attr_type, attr_value)) - return self._collect_atom_values(run_struct.get_structure()) - - -class NamespaceBuilder: - def __init__(self, container: "NeptuneObject"): - self._run = container - - def __call__(self, path: List[str]) -> Namespace: - return Namespace(self._run, path) diff --git 
a/src/neptune/attributes/series/__init__.py b/src/neptune/attributes/series/__init__.py deleted file mode 100644 index 0875a882e..000000000 --- a/src/neptune/attributes/series/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "FileSeries", - "FloatSeries", - "StringSeries", -] - -from .file_series import FileSeries -from .float_series import FloatSeries -from .string_series import StringSeries diff --git a/src/neptune/attributes/series/fetchable_series.py b/src/neptune/attributes/series/fetchable_series.py deleted file mode 100644 index 8326594e5..000000000 --- a/src/neptune/attributes/series/fetchable_series.py +++ /dev/null @@ -1,77 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["FetchableSeries"] - -import abc -from datetime import datetime -from functools import partial -from typing import ( - Dict, - Generic, - Optional, - TypeVar, - Union, -) - -from neptune.api.fetching_series_values import fetch_series_values -from neptune.api.models import ( - FloatPointValue, - StringPointValue, -) -from neptune.internal.utils.paths import path_to_str -from neptune.typing import ProgressBarType - -Row = TypeVar("Row", StringPointValue, FloatPointValue) - - -def make_row(entry: Row, include_timestamp: bool = True) -> Dict[str, Union[str, float, datetime]]: - row: Dict[str, Union[str, float, datetime]] = { - "step": entry.step, - "value": entry.value, - } - - if include_timestamp: - row["timestamp"] = entry.timestamp - - return row - - -class FetchableSeries(Generic[Row]): - @abc.abstractmethod - def _fetch_values_from_backend( - self, limit: int, from_step: Optional[float] = None, include_inherited: bool = True - ) -> Row: ... 
- - def fetch_values( - self, - *, - include_timestamp: bool = True, - progress_bar: Optional[ProgressBarType] = None, - include_inherited: bool = True, - ): - import pandas as pd - - path = path_to_str(self._path) if hasattr(self, "_path") else "" - data = fetch_series_values( - getter=partial(self._fetch_values_from_backend, include_inherited=include_inherited), - path=path, - progress_bar=progress_bar, - ) - - rows = dict((n, make_row(entry=entry, include_timestamp=include_timestamp)) for (n, entry) in enumerate(data)) - - df = pd.DataFrame.from_dict(data=rows, orient="index") - return df diff --git a/src/neptune/attributes/series/file_series.py b/src/neptune/attributes/series/file_series.py deleted file mode 100644 index 7dfb86d98..000000000 --- a/src/neptune/attributes/series/file_series.py +++ /dev/null @@ -1,133 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["FileSeries"] - -import io -import os -import pathlib -from typing import ( - Iterable, - List, - Optional, -) - -from neptune.attributes.series.series import Series -from neptune.exceptions import ( - FileNotFound, - NeptuneUnsupportedFunctionalityException, - OperationNotSupported, -) -from neptune.internal.operation import ( - ClearImageLog, - ImageValue, - LogImages, - Operation, -) -from neptune.internal.types.file_types import FileType -from neptune.internal.utils import base64_encode -from neptune.internal.utils.limits import image_size_exceeds_limit_for_logging -from neptune.types import File -from neptune.types.series.file_series import FileSeries as FileSeriesVal -from neptune.typing import ProgressBarType - -Val = FileSeriesVal -Data = File -LogOperation = LogImages - - -class FileSeries(Series[Val, Data, LogOperation], max_batch_size=1, operation_cls=LogOperation): - @classmethod - def _map_series_val(cls, value: Val) -> List[ImageValue]: - return [ - ImageValue( - data=cls._get_base64_image_content(val), - name=value.name, - description=value.description, - ) - for val in value.values - ] - - def _get_clear_operation(self) -> Operation: - return ClearImageLog(self._path) - - def _data_to_value(self, values: Iterable, **kwargs) -> Val: - return FileSeriesVal(values, **kwargs) - - def _is_value_type(self, value) -> bool: - return isinstance(value, FileSeriesVal) - - @staticmethod - def _get_base64_image_content(file: File) -> str: - if file.file_type is FileType.LOCAL_FILE: - if not os.path.exists(file.path): - raise FileNotFound(file.path) - with open(file.path, "rb") as image_file: - file_content = File.from_stream(image_file).content - else: - file_content = file.content - - from PIL import ( - Image, - UnidentifiedImageError, - ) - - try: - with Image.open(io.BytesIO(file_content)): - ... - except UnidentifiedImageError: - raise OperationNotSupported( - "FileSeries supports only image files for now. Other file types will be implemented in future." 
- ) - - if image_size_exceeds_limit_for_logging(len(file_content)): - file_content = b"" - - return base64_encode(file_content) - - def download(self, destination: Optional[str], progress_bar: Optional[ProgressBarType] = None): - target_dir = self._get_destination(destination) - item_count = self._backend.get_image_series_values( - self._container_id, self._container_type, self._path, 0, 1 - ).total - for i in range(0, item_count): - self._backend.download_file_series_by_index( - self._container_id, self._container_type, self._path, i, target_dir, progress_bar - ) - - def download_last(self, destination: Optional[str]): - raise NeptuneUnsupportedFunctionalityException - target_dir = self._get_destination(destination) - item_count = self._backend.get_image_series_values( - self._container_id, self._container_type, self._path, 0, 1 - ).total - if item_count > 0: - self._backend.download_file_series_by_index( - self._container_id, - self._container_type, - self._path, - item_count - 1, - target_dir, - progress_bar=None, - ) - else: - raise ValueError("Unable to download last file - series is empty") - - def _get_destination(self, destination: Optional[str]): - target_dir = destination - if destination is None: - target_dir = os.path.join("neptune", self._path[-1]) - pathlib.Path(os.path.abspath(target_dir)).mkdir(parents=True, exist_ok=True) - return target_dir diff --git a/src/neptune/attributes/series/float_series.py b/src/neptune/attributes/series/float_series.py deleted file mode 100644 index 07723a7f1..000000000 --- a/src/neptune/attributes/series/float_series.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
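# A reduced, runnable sketch (requires Pillow, which the removed code also
# imports) of the validation in FileSeries._get_base64_image_content above:
# Pillow is asked to parse the raw bytes, and content it cannot identify as
# an image is rejected.
import io

from PIL import Image, UnidentifiedImageError

def is_supported_image(content: bytes) -> bool:
    try:
        # Opening in a context manager parses the header without keeping
        # the file handle around.
        with Image.open(io.BytesIO(content)):
            return True
    except UnidentifiedImageError:
        return False

print(is_supported_image(b"definitely not an image"))  # False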
-# -__all__ = ["FloatSeries"] - -from typing import ( - Iterable, - Optional, - Union, -) - -from neptune.api.models import FloatSeriesValues -from neptune.attributes.series.fetchable_series import FetchableSeries -from neptune.attributes.series.series import Series -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - ClearFloatLog, - ConfigFloatSeries, - LogFloats, - Operation, -) -from neptune.internal.utils import verify_type -from neptune.types.series.float_series import FloatSeries as FloatSeriesVal - -Val = FloatSeriesVal -Data = Union[float, int] -LogOperation = LogFloats - - -class FloatSeries( - Series[Val, Data, LogOperation], FetchableSeries[FloatSeriesValues], max_batch_size=100, operation_cls=LogOperation -): - def configure( - self, - min: Optional[Union[float, int]] = None, - max: Optional[Union[float, int]] = None, - unit: Optional[str] = None, - wait: bool = False, - ) -> None: - verify_type("min", min, (float, int)) - verify_type("max", max, (float, int)) - verify_type("unit", unit, str) - with self._container.lock(): - self._enqueue_operation(ConfigFloatSeries(self._path, min, max, unit), wait=wait) - - def _get_clear_operation(self) -> Operation: - return ClearFloatLog(self._path) - - def _get_config_operation_from_value(self, value: Val) -> Optional[Operation]: - return ConfigFloatSeries(self._path, value.min, value.max, value.unit) - - def _data_to_value(self, values: Iterable, **kwargs) -> Val: - return FloatSeriesVal(values, **kwargs) - - def _is_value_type(self, value) -> bool: - return isinstance(value, FloatSeriesVal) - - def fetch_last(self) -> float: - raise NeptuneUnsupportedFunctionalityException - val = self._backend.get_float_series_attribute(self._container_id, self._container_type, self._path) - return val.last - - def _fetch_values_from_backend( - self, limit: int, from_step: Optional[float] = None, include_inherited: bool = True - ) -> FloatSeriesValues: - return self._backend.get_float_series_values( - container_id=self._container_id, - container_type=self._container_type, - path=self._path, - from_step=from_step, - limit=limit, - include_inherited=include_inherited, - ) diff --git a/src/neptune/attributes/series/series.py b/src/neptune/attributes/series/series.py deleted file mode 100644 index d5254b200..000000000 --- a/src/neptune/attributes/series/series.py +++ /dev/null @@ -1,167 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Series"] - -import abc -from typing import ( - Collection, - Generic, - Iterable, - List, - Optional, - TypeVar, - Union, -) - -from neptune.attributes.attribute import Attribute -from neptune.internal.operation import LogOperation -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import ( - is_collection, - is_stringify_value, - verify_collection_type, - verify_type, -) -from neptune.internal.utils.iteration import get_batches -from neptune.types.series.series import Series as SeriesVal - -ValTV = TypeVar("ValTV", bound=SeriesVal) -DataTV = TypeVar("DataTV") -LogOperationTV = TypeVar("LogOperationTV", bound=LogOperation) - - -class Series(Attribute, Generic[ValTV, DataTV, LogOperationTV]): - def __init_subclass__(cls, max_batch_size: int, operation_cls: type(LogOperationTV)): - cls.max_batch_size = max_batch_size - cls.operation_cls = operation_cls - - def clear(self, *, wait: bool = False) -> None: - self._clear_impl(wait) - - def _get_log_operations_from_value(self, value: ValTV) -> List[LogOperationTV]: - mapped_values = self._map_series_val(value) - values_with_step_and_ts = zip(mapped_values, value.steps, value.timestamps) - log_values = [self.operation_cls.ValueType(val, step=step, ts=ts) for val, step, ts in values_with_step_and_ts] - return [ - self.operation_cls(self._path, chunk) for chunk in get_batches(log_values, batch_size=self.max_batch_size) - ] - - @classmethod - def _map_series_val(cls, value: ValTV) -> List[DataTV]: - return value.values - - def _get_config_operation_from_value(self, value: ValTV) -> Optional[LogOperationTV]: - return None - - @abc.abstractmethod - def _get_clear_operation(self) -> LogOperationTV: - pass - - @abc.abstractmethod - def _data_to_value(self, values: Iterable, **kwargs) -> ValTV: - pass - - @abc.abstractmethod - def _is_value_type(self, value) -> bool: - pass - - def _handle_stringified_value(self, value): - return value.value - - def assign(self, value, wait: bool = False) -> None: - if not self._is_value_type(value): - value = self._data_to_value(value) - clear_op = self._get_clear_operation() - config_op = self._get_config_operation_from_value(value) - with self._container.lock(): - if config_op: - self._enqueue_operation(config_op, wait=False) - if not value.values: - self._enqueue_operation(clear_op, wait=wait) - else: - self._enqueue_operation(clear_op, wait=False) - ops = self._get_log_operations_from_value(value) - for op in ops: - self._enqueue_operation(op, wait=wait) - - def log( - self, - value: Union[DataTV, Iterable[DataTV]], - step: Optional[float] = None, - timestamp: Optional[float] = None, - wait: bool = False, - **kwargs, - ) -> None: - """log is a deprecated method, this code should be removed in future""" - if step is not None: - verify_type("step", step, (float, int)) - if timestamp is not None: - verify_type("timestamp", timestamp, (float, int)) - - if is_stringify_value(value): - value = self._handle_stringified_value(value) - - if is_collection(value): - if step is not None and len(value) > 1: - raise ValueError("Collection of values is not supported for explicitly defined 'step'.") - steps = None if step is None else [step] * len(value) - timestamps = None if timestamp is None else [timestamp] * len(value) - value = self._data_to_value(value, steps=steps, timestamps=timestamps, **kwargs) - else: - steps = None if step is None else [step] - timestamps = None if timestamp is None else [timestamp] - value = self._data_to_value([value], steps=steps, 
timestamps=timestamps, **kwargs) - - ops = self._get_log_operations_from_value(value) - - with self._container.lock(): - for op in ops: - self._enqueue_operation(op, wait=wait) - - def extend( - self, - values: Union[Collection[DataTV], StringifyValue], - steps: Optional[Collection[float]] = None, - timestamps: Optional[Collection[float]] = None, - wait: bool = False, - **kwargs, - ) -> None: - if is_stringify_value(values): - values = self._handle_stringified_value(values) - - if steps is not None: - verify_collection_type("steps", steps, (float, int)) - if len(steps) != len(values): - raise ValueError(f"Number of steps must be equal to number of values ({len(steps)} != {len(values)})") - - if timestamps is not None: - verify_collection_type("timestamps", timestamps, (float, int)) - if len(timestamps) != len(values): - raise ValueError( - f"Number of timestamps must be equal to number of values ({len(timestamps)} != {len(values)})" - ) - - value = self._data_to_value(values, steps=steps, timestamps=timestamps, **kwargs) - ops = self._get_log_operations_from_value(value) - - with self._container.lock(): - for op in ops: - self._enqueue_operation(op, wait=wait) - - def _clear_impl(self, wait: bool = False) -> None: - op = self._get_clear_operation() - with self._container.lock(): - self._enqueue_operation(op, wait=wait) diff --git a/src/neptune/attributes/series/string_series.py b/src/neptune/attributes/series/string_series.py deleted file mode 100644 index e66cf536c..000000000 --- a/src/neptune/attributes/series/string_series.py +++ /dev/null @@ -1,109 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
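# A minimal sketch of the chunking done by _get_log_operations_from_value
# above: values are zipped with their steps and timestamps, then split into
# batches of at most max_batch_size, one operation per batch. get_batches is
# re-implemented here so the snippet stands alone.
from typing import Iterator, List, Sequence, TypeVar

T = TypeVar("T")

def get_batches(items: Sequence[T], batch_size: int) -> Iterator[List[T]]:
    for i in range(0, len(items), batch_size):
        yield list(items[i : i + batch_size])

log_values = list(zip([0.9, 0.7, 0.5, 0.4], [0, 1, 2, 3]))  # (value, step) pairs
print([len(chunk) for chunk in get_batches(log_values, batch_size=3)])  # [3, 1]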
-# -__all__ = ["StringSeries"] - -from typing import ( - TYPE_CHECKING, - Iterable, - List, - Optional, - Union, -) - -from neptune.api.models import StringSeriesValues -from neptune.attributes.series.fetchable_series import FetchableSeries -from neptune.attributes.series.series import Series -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - ClearStringLog, - LogStrings, - Operation, -) -from neptune.internal.utils import is_collection -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import path_to_str -from neptune.types.series.string_series import MAX_STRING_SERIES_VALUE_LENGTH -from neptune.types.series.string_series import StringSeries as StringSeriesVal - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - -Val = StringSeriesVal -Data = str -LogOperation = LogStrings - -logger = get_logger() - - -class StringSeries( - Series[Val, Data, LogOperation], FetchableSeries[StringSeriesValues], max_batch_size=10, operation_cls=LogOperation -): - def __init__(self, container: "NeptuneObject", path: List[str]): - super().__init__(container, path) - self._value_truncation_occurred = False - - def _get_log_operations_from_value( - self, - value: Val, - ) -> List[LogOperation]: - if not self._value_truncation_occurred and value.truncated: - # the first truncation - self._value_truncation_occurred = True - logger.warning( - "Warning: string series '%s' value was" - " longer than %s characters and was truncated." - " This warning is printed only once per series.", - path_to_str(self._path), - MAX_STRING_SERIES_VALUE_LENGTH, - ) - - return super()._get_log_operations_from_value(value) - - def _get_clear_operation(self) -> Operation: - return ClearStringLog(self._path) - - def _data_to_value(self, values: Iterable, **kwargs) -> Val: - steps = kwargs.pop("steps", None) - timestamps = kwargs.pop("timestamps", None) - - if kwargs: - logger.warning("Warning: unexpected arguments (%s) in StringSeries", kwargs) - - return StringSeriesVal(values, steps=steps, timestamps=timestamps) - - def _is_value_type(self, value) -> bool: - return isinstance(value, StringSeriesVal) - - def _handle_stringified_value(self, value) -> Union[List[str], str]: - if is_collection(value.value): - return list(map(str, value.value)) - return str(value.value) - - def fetch_last(self) -> str: - raise NeptuneUnsupportedFunctionalityException - val = self._backend.get_string_series_attribute(self._container_id, self._container_type, self._path) - return val.last - - def _fetch_values_from_backend( - self, limit: int, from_step: Optional[float] = None, include_inherited: bool = True - ) -> StringSeriesValues: - return self._backend.get_string_series_values( - container_id=self._container_id, - container_type=self._container_type, - path=self._path, - from_step=from_step, - limit=limit, - ) diff --git a/src/neptune/attributes/sets/__init__.py b/src/neptune/attributes/sets/__init__.py deleted file mode 100644 index 81987fa2a..000000000 --- a/src/neptune/attributes/sets/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "StringSet", -] - -from .string_set import StringSet diff --git a/src/neptune/attributes/sets/set.py b/src/neptune/attributes/sets/set.py deleted file mode 100644 index 7c83ef921..000000000 --- a/src/neptune/attributes/sets/set.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Set"] - -from neptune.attributes.attribute import Attribute - - -class Set(Attribute): - pass diff --git a/src/neptune/attributes/sets/string_set.py b/src/neptune/attributes/sets/string_set.py deleted file mode 100644 index 906fac8bf..000000000 --- a/src/neptune/attributes/sets/string_set.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["StringSet"] - -import typing -from typing import ( - Iterable, - Union, -) - -from neptune.attributes.sets.set import Set -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - AddStrings, - ClearStringSet, - RemoveStrings, -) -from neptune.internal.utils import ( - is_collection, - verify_collection_type, - verify_type, -) -from neptune.types.sets.string_set import StringSet as StringSetVal - - -class StringSet(Set): - def assign(self, value: StringSetVal, *, wait: bool = False): - verify_type("value", value, StringSetVal) - with self._container.lock(): - if not value.values: - self._enqueue_operation(ClearStringSet(self._path), wait=wait) - else: - self._enqueue_operation(ClearStringSet(self._path), wait=False) - self._enqueue_operation(AddStrings(self._path, value.values), wait=wait) - - def add(self, values: Union[str, Iterable[str]], *, wait: bool = False): - values = self._to_proper_value_type(values) - with self._container.lock(): - self._enqueue_operation(AddStrings(self._path, set(values)), wait=wait) - - def remove(self, values: Union[str, Iterable[str]], *, wait: bool = False): - raise NeptuneUnsupportedFunctionalityException - values = self._to_proper_value_type(values) - with self._container.lock(): - self._enqueue_operation(RemoveStrings(self._path, set(values)), wait=wait) - - def clear(self, *, wait: bool = False): - raise NeptuneUnsupportedFunctionalityException - with self._container.lock(): - self._enqueue_operation(ClearStringSet(self._path), wait=wait) - - def fetch(self) -> typing.Set[str]: - val = self._backend.get_string_set_attribute(self._container_id, self._container_type, self._path) - return val.values - - @staticmethod - def _to_proper_value_type(values: Union[str, Iterable[str]]) -> Iterable[str]: - if is_collection(values): - verify_collection_type("values", values, str) - return list(values) - else: - verify_type("values", values, str) - return [values] diff --git a/src/neptune/attributes/utils.py b/src/neptune/attributes/utils.py deleted file mode 100644 index 18d83410b..000000000 --- a/src/neptune/attributes/utils.py +++ /dev/null @@ -1,78 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["create_attribute_from_type"] - -from typing import ( - TYPE_CHECKING, - List, -) - -from neptune.api.models import FieldType -from neptune.attributes import ( - Artifact, - Boolean, - Datetime, - File, - FileSeries, - FileSet, - Float, - FloatSeries, - GitRef, - Integer, - NotebookRef, - RunState, - String, - StringSeries, - StringSet, -) -from neptune.internal.exceptions import InternalClientError - -if TYPE_CHECKING: - from neptune.attributes.attribute import Attribute - from neptune.objects import NeptuneObject - -_attribute_type_to_attr_class_map = { - FieldType.FLOAT: Float, - FieldType.INT: Integer, - FieldType.BOOL: Boolean, - FieldType.STRING: String, - FieldType.DATETIME: Datetime, - FieldType.FILE: File, - FieldType.FILE_SET: FileSet, - FieldType.FLOAT_SERIES: FloatSeries, - FieldType.STRING_SERIES: StringSeries, - FieldType.IMAGE_SERIES: FileSeries, - FieldType.STRING_SET: StringSet, - FieldType.GIT_REF: GitRef, - FieldType.OBJECT_STATE: RunState, - FieldType.NOTEBOOK_REF: NotebookRef, - FieldType.ARTIFACT: Artifact, -} - - -def create_attribute_from_type( - attribute_type: FieldType, - container: "NeptuneObject", - path: List[str], -) -> "Attribute": - try: - return _attribute_type_to_attr_class_map[attribute_type](container, path) - except KeyError: - raise InternalClientError(f"Unexpected type: {attribute_type}") - - -def delayed_(): - pass diff --git a/src/neptune/cli/__init__.py b/src/neptune/cli/__init__.py deleted file mode 100644 index de4b6376a..000000000 --- a/src/neptune/cli/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ["sync", "status"] - -from neptune.cli.commands import ( - status, - sync, -) diff --git a/src/neptune/cli/__main__.py b/src/neptune/cli/__main__.py deleted file mode 100644 index 961b59302..000000000 --- a/src/neptune/cli/__main__.py +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import warnings - -import click -import pkg_resources - -from neptune.cli.commands import ( - clear, - status, - sync, -) - - -@click.group() -def main() -> None: - pass - - -main.add_command(sync) -main.add_command(status) -main.add_command(clear) - -plugins = {entry_point.name: entry_point for entry_point in pkg_resources.iter_entry_points("neptune.plugins")} - -for name, entry_point in plugins.items(): - # Loading an entry_point may fail, which would cause all CLI - # commands to fail, so we load each plug-in in a try/except block - # and skip the ones that cannot be loaded. - try: - loaded_plugin = entry_point.load() - except Exception as e: - warnings.warn(f"Failed to load neptune plug-in `{name}` with exception: {e}") - continue - main.add_command(loaded_plugin, name) diff --git a/src/neptune/cli/clear.py b/src/neptune/cli/clear.py deleted file mode 100644 index 8d92e6c1a..000000000 --- a/src/neptune/cli/clear.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ["ClearRunner"] - -import shutil -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Sequence, -) - -import click - -from neptune.cli.collect import collect_containers -from neptune.cli.status import StatusRunner -from neptune.constants import SYNC_DIRECTORY -from neptune.internal.utils.logger import get_logger - -if TYPE_CHECKING: - from neptune.cli.containers import ( - AsyncContainer, - Container, - OfflineContainer, - ) - from neptune.internal.backends.neptune_backend import NeptuneBackend - -logger = get_logger(with_prefix=False) - - -class ClearRunner: - @staticmethod - def clear(*, backend: "NeptuneBackend", path: Path, force: bool = False, clear_eventual: bool = True) -> None: - containers = collect_containers(path=path, backend=backend) - - remove_sync_containers(path=path) - remove_containers(containers=containers.not_found_containers) - remove_containers( - containers=filter_containers(a=containers.synced_containers, b=containers.not_found_containers) - ) - - if clear_eventual and (containers.offline_containers or containers.unsynced_containers): - log_junk_metadata( - offline_containers=containers.offline_containers, unsynced_containers=containers.unsynced_containers - ) - - if force or click.confirm("\nDo you want to delete the listed metadata?"): - remove_containers(containers=containers.offline_containers) - remove_containers(containers=containers.unsynced_containers) - - -def filter_containers(*, a: Sequence["Container"], b: Sequence["Container"]) -> Sequence["Container"]: - b_ids = {container.container_id for container in b} - return [container for container in a if container.container_id not in b_ids] - - -def remove_sync_containers(*, path: Path) -> None: - """ - This function can safely remove SYNC_DIRECTORY; the Neptune client only stores files awaiting upload in this location.
- """ - shutil.rmtree(path / SYNC_DIRECTORY, ignore_errors=True) - - -def log_junk_metadata( - *, offline_containers: Sequence["OfflineContainer"], unsynced_containers: Sequence["AsyncContainer"] -) -> None: - if unsynced_containers: - logger.info("") - StatusRunner.log_unsync_objects(unsynced_containers=unsynced_containers) - - if offline_containers: - logger.info("") - StatusRunner.log_offline_objects(offline_containers=offline_containers, info=False) - - -def remove_containers(*, containers: Sequence["Container"]) -> None: - for container in containers: - container.clear() diff --git a/src/neptune/cli/collect.py b/src/neptune/cli/collect.py deleted file mode 100644 index aa06c5e55..000000000 --- a/src/neptune/cli/collect.py +++ /dev/null @@ -1,142 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import defaultdict -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Callable, - Dict, - Iterable, - List, - NamedTuple, - Tuple, -) - -from neptune.cli.containers import ( - AsyncContainer, - ExecutionDirectory, - OfflineContainer, -) -from neptune.cli.utils import ( - detect_async_dir, - detect_offline_dir, - get_metadata_container, - is_single_execution_dir_synced, -) -from neptune.constants import ( - ASYNC_DIRECTORY, - OFFLINE_DIRECTORY, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.objects.structure_version import StructureVersion - -if TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -class CollectedContainers(NamedTuple): - async_containers: List[AsyncContainer] - offline_containers: List[OfflineContainer] - synced_containers: List[AsyncContainer] - unsynced_containers: List[AsyncContainer] - not_found_containers: List[AsyncContainer] - - -def collect_containers(*, path: Path, backend: "NeptuneBackend") -> CollectedContainers: - if not path.is_dir(): - return CollectedContainers( - async_containers=[], - offline_containers=[], - synced_containers=[], - unsynced_containers=[], - not_found_containers=[], - ) - - async_containers: List[AsyncContainer] = [] - if (path / ASYNC_DIRECTORY).exists(): - async_containers = list(collect_async_containers(path=path, backend=backend)) - - offline_containers = [] - if (path / OFFLINE_DIRECTORY).exists(): - offline_containers = list(collect_offline_containers(path=path)) - - return CollectedContainers( - async_containers=async_containers, - offline_containers=offline_containers, - synced_containers=[x for x in async_containers if x.synced], - unsynced_containers=[x for x in async_containers if not x.synced and x.found is True], - not_found_containers=[x for x in async_containers if x.found is False], - ) - - -def collect_async_containers(*, path: Path, backend: "NeptuneBackend") -> Iterable[AsyncContainer]: - container_to_execution_dirs = collect_by_container(base_path=path / ASYNC_DIRECTORY, detect_by=detect_async_dir) - - for (container_type, 
container_id), execution_dirs in container_to_execution_dirs.items(): - experiment = get_metadata_container(backend=backend, container_type=container_type, container_id=container_id) - found = experiment is not None - - yield AsyncContainer( - container_id=container_id, - container_type=container_type, - found=found, - experiment=experiment, - execution_dirs=execution_dirs, - ) - - -def collect_offline_containers(*, path: Path) -> Iterable[OfflineContainer]: - container_to_execution_dirs = collect_by_container(base_path=path / OFFLINE_DIRECTORY, detect_by=detect_offline_dir) - - for (container_type, container_id), execution_dirs in container_to_execution_dirs.items(): - yield OfflineContainer( - container_id=container_id, - container_type=container_type, - execution_dirs=execution_dirs, - found=False, - ) - - -def collect_child_directories(base_path: Path, structure_version: StructureVersion) -> List[Path]: - if structure_version in {StructureVersion.CHILD_EXECUTION_DIRECTORIES, StructureVersion.LEGACY}: - return list(map(lambda r: base_path / r.name, base_path.iterdir())) - elif structure_version == StructureVersion.DIRECT_DIRECTORY: - return [base_path] - else: - raise ValueError(f"Unknown structure version {structure_version}") - - -def collect_by_container( - *, base_path: Path, detect_by: Callable[[str], Tuple[ContainerType, UniqueId, StructureVersion]] -) -> Dict[Tuple[ContainerType, UniqueId], List["ExecutionDirectory"]]: - container_to_execution_dirs: Dict[Tuple[ContainerType, UniqueId], List["ExecutionDirectory"]] = defaultdict(list) - - for child_path in base_path.iterdir(): - container_type, unique_id, structure_version = detect_by(child_path.name) - execution_dirs = collect_child_directories(child_path, structure_version) - for execution_dir in execution_dirs: - parent = execution_dir.parent if structure_version == StructureVersion.CHILD_EXECUTION_DIRECTORIES else None - container_to_execution_dirs[(container_type, unique_id)].append( - ExecutionDirectory( - path=execution_dir, - synced=is_single_execution_dir_synced(execution_dir), - structure_version=structure_version, - parent=parent, - ) - ) - - return container_to_execution_dirs diff --git a/src/neptune/cli/commands.py b/src/neptune/cli/commands.py deleted file mode 100644 index e8fb4ab51..000000000 --- a/src/neptune/cli/commands.py +++ /dev/null @@ -1,169 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
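# The collection logic above boils down to grouping execution directories by
# a (container_type, container_id) key in a defaultdict. A reduced, runnable
# illustration; the directory names and the "__" parser below are
# illustrative only, not the real detect_async_dir/detect_offline_dir format:
from collections import defaultdict
from pathlib import Path

dir_names = ["run__abc123__exec-0", "run__abc123__exec-1", "run__def456__exec-0"]

by_container = defaultdict(list)
for name in dir_names:
    container_type, container_id, execution = name.split("__")  # toy detect_by
    by_container[(container_type, container_id)].append(Path(name))

for key, dirs in by_container.items():
    print(key, [d.name for d in dirs])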
-# - -__all__ = ["status", "sync", "clear"] - -from pathlib import Path -from typing import ( - List, - Optional, -) - -import click - -from neptune.cli.clear import ClearRunner -from neptune.cli.path_option import path_option -from neptune.cli.status import StatusRunner -from neptune.cli.sync import SyncRunner -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.backends.hosted_neptune_backend import HostedNeptuneBackend -from neptune.internal.credentials import Credentials - - -@click.command() -@path_option -def status(path: Path) -> None: - """List synchronized and unsynchronized objects in the given directory. Trashed objects are not listed. - - Neptune stores object data on disk in the '.neptune' directory. If an object executes offline - or if the network is unavailable as the object executes, the object data can be synchronized - with the server with this command line utility. - - Examples: - - \b - # List synchronized and unsynchronized objects in the current directory - neptune status - - \b - # List synchronized and unsynchronized objects in directory "foo/bar" without actually syncing - neptune status --path foo/bar - """ - - raise NeptuneUnsupportedFunctionalityException - backend = HostedNeptuneBackend(Credentials.from_token()) - - StatusRunner.status(backend=backend, path=path) - - -@click.command() -@path_option -@click.option( - "--object", - "object_names", - multiple=True, - metavar="", - help="object name (workspace/project/short-id or UUID for offline runs) to synchronize.", -) -@click.option( - "-p", - "--project", - "project_name", - multiple=False, - metavar="project-name", - help="project name (workspace/project) where offline runs will be sent", -) -@click.option( - "--offline-only", - "offline_only", - is_flag=True, - default=False, - help="synchronize only the offline runs inside '.neptune' directory", -) -def sync( - path: Path, - object_names: List[str], - project_name: Optional[str], - offline_only: Optional[bool], -) -> None: - """Synchronizes objects with unsent data to the server. - - Neptune stores object data on disk in the '.neptune' directory. If an object executes offline - or if the network is unavailable as the run executes, the object data can be synchronized - with the server with this command line utility. 
- - You can list unsynchronized runs with `neptune status`. - - Examples: - - \b - # Synchronize all objects in the current directory - neptune sync - - \b - # Synchronize all objects in the given path - neptune sync --path foo/bar - - \b - # Synchronize only runs "NPT-42" and "NPT-43" in "workspace/project" in the current directory - neptune sync --object workspace/project/NPT-42 --object workspace/project/NPT-43 - - \b - # Synchronize all objects in the current directory, sending offline runs to project "workspace/project" - neptune sync --project workspace/project - - \b - # Synchronize only the offline run with UUID offline/a1561719-b425-4000-a65a-b5efb044d6bb - # to project "workspace/project" - neptune sync --project workspace/project --object offline/a1561719-b425-4000-a65a-b5efb044d6bb - - \b - # Synchronize only the offline runs - neptune sync --offline-only - - \b - # Synchronize only the offline runs to project "workspace/project" - neptune sync --project workspace/project --offline-only - """ - - raise NeptuneUnsupportedFunctionalityException - backend = HostedNeptuneBackend(Credentials.from_token()) - - if offline_only: - if object_names: - raise click.BadParameter("--object and --offline-only are mutually exclusive") - - SyncRunner.sync_all_offline(backend=backend, base_path=path, project_name=project_name) - - elif object_names: - SyncRunner.sync_selected(backend=backend, base_path=path, project_name=project_name, object_names=object_names) - else: - SyncRunner.sync_all(backend=backend, base_path=path, project_name=project_name) - - -@click.command() -@path_option -def clear(path: Path) -> None: - """ - Clears metadata that has been synchronized or trashed, but is still present in local storage. - - Lists objects and data to be cleared before deleting the data. - - Examples: - - \b - # Clear junk metadata from local storage - neptune clear - - \b - # Clear junk metadata from directory "foo/bar" - neptune clear --path foo/bar - """ - - raise NeptuneUnsupportedFunctionalityException - backend = HostedNeptuneBackend(Credentials.from_token()) - - ClearRunner.clear(backend=backend, path=path) diff --git a/src/neptune/cli/containers.py b/src/neptune/cli/containers.py deleted file mode 100644 index 3961cb73e..000000000 --- a/src/neptune/cli/containers.py +++ /dev/null @@ -1,322 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
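# The commands above are standard click wiring: module-level @click.command
# functions that a group (see __main__.py earlier in this patch) registers
# via add_command. A self-contained miniature; the group, command, and
# option names here are illustrative only:
import click

@click.group()
def cli() -> None:
    pass

@cli.command()
@click.option("--path", default=".", help="directory containing a '.neptune' folder")
def status(path: str) -> None:
    click.echo(f"checking {path}")

if __name__ == "__main__":
    cli()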
-# -__all__ = [ - "Container", - "ExecutionDirectory", - "OfflineContainer", - "AsyncContainer", -] - -import os -import shutil -import threading -import time -from abc import ( - ABC, - abstractmethod, -) -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Optional, -) - -from neptune.cli.utils import get_qualified_name -from neptune.constants import ASYNC_DIRECTORY -from neptune.core.components.operation_storage import OperationStorage -from neptune.core.components.queue.disk_queue import DiskQueue -from neptune.envs import NEPTUNE_SYNC_BATCH_TIMEOUT_ENV -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneConnectionLostException -from neptune.internal.id_formats import UniqueId -from neptune.internal.operation import Operation -from neptune.internal.operation_processors.utils import get_container_dir -from neptune.internal.utils.logger import get_logger -from neptune.objects.structure_version import StructureVersion - -if TYPE_CHECKING: - from neptune.internal.backends.api_model import ( - ApiExperiment, - Project, - ) - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -logger = get_logger(with_prefix=False) -retries_timeout = int(os.getenv(NEPTUNE_SYNC_BATCH_TIMEOUT_ENV, "3600")) - - -class ExecutionDirectory: - def __init__(self, path: Path, synced: bool, structure_version: StructureVersion, parent: Optional[Path] = None): - self._path = path - self._synced = synced - self._structure_version = structure_version - self._parent = parent - - @property - def path(self) -> Path: - return self._path - - @property - def synced(self) -> bool: - return self._synced - - @property - def structure_version(self) -> StructureVersion: - return self._structure_version - - @property - def parent(self) -> Optional[Path]: - return self._parent - - def clear(self) -> None: - if self.path.exists(): - remove_directory_structure(self.path) - - def sync(self, *, backend: "NeptuneBackend", container_id: UniqueId, container_type: ContainerType) -> None: - operation_storage = OperationStorage(self.path) - serializer: Callable[[Operation], Dict[str, Any]] = lambda op: op.to_dict() - - with DiskQueue( - data_path=self.path, - to_dict=serializer, - from_dict=Operation.from_dict, - lock=threading.RLock(), - ) as disk_queue: - while True: - raw_batch = disk_queue.get_batch(1000) - if not raw_batch: - break - version = raw_batch[-1].ver - batch = [element.obj for element in raw_batch] - - start_time = time.monotonic() - expected_count = len(batch) - version_to_ack = version - expected_count - while True: - try: - processed_count, _ = backend.execute_operations( - container_id=container_id, - container_type=container_type, - operations=batch, - operation_storage=operation_storage, - ) - version_to_ack += processed_count - batch = batch[processed_count:] - disk_queue.ack(version) - if version_to_ack == version: - break - except NeptuneConnectionLostException as ex: - if time.monotonic() - start_time > retries_timeout: - raise ex - logger.warning( - "Experiencing connection interruptions." - " Will try to reestablish communication with Neptune." 
- " Internal exception was: %s", - ex.cause.__class__.__name__, - ) - - def move(self, *, base_path: Path, target_container_id: UniqueId, container_type: ContainerType) -> None: - new_online_dir = get_container_dir(container_id=target_container_id, container_type=container_type) - try: - (base_path / ASYNC_DIRECTORY).mkdir(parents=True, exist_ok=True) - except OSError: - logger.warning(f"Cannot create directory: {base_path / ASYNC_DIRECTORY}") - return - - self.path.rename(base_path / ASYNC_DIRECTORY / new_online_dir) - self._path = base_path / ASYNC_DIRECTORY / new_online_dir - self._structure_version = StructureVersion.DIRECT_DIRECTORY - - -class Container(ABC): - @property - @abstractmethod - def container_id(self) -> UniqueId: ... - - @property - @abstractmethod - def execution_dirs(self) -> List[ExecutionDirectory]: ... - - @property - def synced(self) -> bool: - return all(map(lambda execution_dir: execution_dir.synced, self.execution_dirs)) - - @abstractmethod - def sync(self, *, base_path: Path, backend: "NeptuneBackend", project: Optional["Project"] = None) -> None: ... - - def clear(self) -> None: - for execution_dir in self.execution_dirs: - execution_dir.clear() - - for execution_dir in self.execution_dirs: - if execution_dir.parent is not None: - remove_directory(execution_dir.parent) - break - - -class AsyncContainer(Container): - def __init__( - self, - container_id: UniqueId, - container_type: ContainerType, - execution_dirs: List[ExecutionDirectory], - found: bool, - experiment: Optional["ApiExperiment"] = None, - ): - self._container_id = container_id - self._container_type = container_type - self._execution_dirs = execution_dirs - self._found = found - self._experiment = experiment - - @property - def container_id(self) -> UniqueId: - return self._container_id - - @property - def container_type(self) -> ContainerType: - return self._container_type - - @property - def execution_dirs(self) -> List[ExecutionDirectory]: - return self._execution_dirs - - @property - def found(self) -> bool: - return self._found - - @property - def experiment(self) -> Optional["ApiExperiment"]: - return self._experiment - - def sync(self, *, base_path: Path, backend: "NeptuneBackend", project: Optional["Project"] = None) -> None: - assert self.experiment is not None # mypy fix - - qualified_container_name = get_qualified_name(self.experiment) - logger.info("Synchronising %s", qualified_container_name) - - for execution_dir in self.execution_dirs: - if not execution_dir.synced: - execution_dir.sync( - backend=backend, - container_id=self.container_id, - container_type=self.container_type, - ) - - self.clear() - logger.info("Synchronization of %s %s completed.", self.experiment.type.value, qualified_container_name) - - -class OfflineContainer(Container): - def __init__( - self, - container_id: UniqueId, - container_type: ContainerType, - execution_dirs: List[ExecutionDirectory], - found: bool, - ): - self._container_id = container_id - self._container_type = container_type - self._execution_dirs = execution_dirs - self._found = found - - @property - def container_id(self) -> UniqueId: - return self._container_id - - @property - def container_type(self) -> ContainerType: - return self._container_type - - @property - def execution_dirs(self) -> List[ExecutionDirectory]: - return self._execution_dirs - - @property - def found(self) -> bool: - return self._found - - def sync(self, *, base_path: Path, backend: "NeptuneBackend", project: Optional["Project"] = None) -> None: - assert project is not 
None # mypy fix - - experiment = register_offline_container( - backend=backend, - project=project, - container_type=self.container_type, - ) - - if experiment: - self._container_id = experiment.id - for execution_dir in self.execution_dirs: - execution_dir.move( - base_path=base_path, - target_container_id=self.container_id, - container_type=self.container_type, - ) - else: - logger.warning("Cannot register offline container %s", self.container_id) - return - - qualified_container_name = get_qualified_name(experiment) - logger.info("Offline container %s registered as %s", self.container_id, qualified_container_name) - logger.info("Synchronising %s", qualified_container_name) - - for execution_dir in self.execution_dirs: - execution_dir.sync( - backend=backend, - container_id=self.container_id, - container_type=self.container_type, - ) - - self.clear() - logger.info("Synchronization of %s %s completed.", experiment.type.value, qualified_container_name) - - -def remove_directory(path: Path) -> None: - try: - path.rmdir() - logger.info(f"Deleted: {path}") - except OSError: - logger.warning(f"Cannot remove directory: {path}") - - -def remove_directory_structure(path: Path) -> None: - try: - shutil.rmtree(path) - logger.info(f"Deleted: {path}") - except OSError: - logger.warning(f"Cannot remove directory: {path}") - - -def register_offline_container( - *, backend: "NeptuneBackend", project: "Project", container_type: ContainerType -) -> Optional["ApiExperiment"]: - try: - if container_type == ContainerType.RUN: - return backend.create_run(project.id) - else: - raise ValueError("Only runs are supported in offline mode") - except Exception as e: - logger.warning( - "Exception occurred while trying to create a run" " on the Neptune server. Please try again later", - ) - logger.exception(e) - return None diff --git a/src/neptune/cli/path_option.py b/src/neptune/cli/path_option.py deleted file mode 100644 index 48ebb8d0d..000000000 --- a/src/neptune/cli/path_option.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
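# The retry loop in ExecutionDirectory.sync above, distilled: keep
# resubmitting the unacknowledged tail of a batch until everything is
# processed, giving up once a wall-clock budget is exhausted. `send` is a
# stand-in for backend.execute_operations and returns how many operations
# were accepted; the disk-queue versioning is omitted for brevity.
import time

def send_with_retries(batch, send, retries_timeout=3600.0):
    start = time.monotonic()
    while batch:
        try:
            processed = send(batch)
            batch = batch[processed:]  # drop the acknowledged prefix
        except ConnectionError:
            if time.monotonic() - start > retries_timeout:
                raise
            print("Connection lost; retrying...")

send_with_retries(list(range(5)), send=lambda b: min(2, len(b)))  # 3 calls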
-# - -__all__ = ["path_option"] - -from pathlib import Path -from typing import Any - -import click - -from neptune.constants import NEPTUNE_DATA_DIRECTORY - - -def get_neptune_path(ctx: Any, param: Any, path: str) -> Path: - # check if path exists and contains a '.neptune' folder - local_path = Path(path) - - if (local_path / NEPTUNE_DATA_DIRECTORY).is_dir(): - return local_path / NEPTUNE_DATA_DIRECTORY - elif local_path.name == NEPTUNE_DATA_DIRECTORY and local_path.is_dir(): - return local_path - else: - raise click.BadParameter("Path {} does not contain a '{}' folder.".format(local_path, NEPTUNE_DATA_DIRECTORY)) - - -path_option = click.option( - "--path", - type=click.Path(exists=True, file_okay=False, resolve_path=True), - default=Path.cwd(), - callback=get_neptune_path, - metavar="", - help="path to a directory containing a '.neptune' folder with stored objects", -) diff --git a/src/neptune/cli/status.py b/src/neptune/cli/status.py deleted file mode 100644 index aab2ce581..000000000 --- a/src/neptune/cli/status.py +++ /dev/null @@ -1,103 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ["StatusRunner"] - -import sys -import textwrap -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Sequence, -) - -from neptune.cli.collect import collect_containers -from neptune.cli.containers import ( - AsyncContainer, - OfflineContainer, -) -from neptune.cli.utils import get_qualified_name -from neptune.constants import OFFLINE_NAME_PREFIX -from neptune.envs import PROJECT_ENV_NAME -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.utils.logger import get_logger - -logger = get_logger(with_prefix=False) - -if TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -offline_run_explainer = """ -Runs that execute offline are not created on the server and are not assigned to projects; -instead, they are identified by UUIDs like the ones above. -When synchronizing offline runs, specify the workspace and project using the "--project" -flag. Alternatively, you can set the environment variable -{} to the target workspace/project. See the examples below. 
-""".format( - PROJECT_ENV_NAME -) - - -class StatusRunner: - @staticmethod - def status(*, backend: "NeptuneBackend", path: Path) -> None: - containers = collect_containers(path=path, backend=backend) - - if len(containers.not_found_containers) > 0: - logger.warning( - "\nWARNING: %s objects was skipped because they do not exist anymore.", - len(containers.not_found_containers), - ) - if not any([containers.synced_containers, containers.unsynced_containers, containers.offline_containers]): - logger.info("There are no Neptune objects in %s", path) - sys.exit(1) - - StatusRunner.log_unsync_objects(unsynced_containers=containers.unsynced_containers) - StatusRunner.log_offline_objects(offline_containers=containers.offline_containers) - - if not containers.unsynced_containers: - logger.info("\nThere are no unsynchronized objects in %s", path) - - if not containers.synced_containers: - logger.info("\nThere are no synchronized objects in %s", path) - - logger.info("\nPlease run with the `neptune sync --help` to see example commands.") - - @staticmethod - def log_offline_objects(*, offline_containers: Sequence["OfflineContainer"], info: bool = True) -> None: - if offline_containers: - logger.info("Unsynchronized offline objects:") - for container in offline_containers: - logger.info("- %s", f"{OFFLINE_NAME_PREFIX}{container.container_id}") - - if info: - logger.info("\n%s", textwrap.fill(offline_run_explainer, width=90)) - - @staticmethod - def log_unsync_objects(*, unsynced_containers: Sequence["AsyncContainer"]) -> None: - if unsynced_containers: - logger.info("Unsynchronized objects:") - for container in unsynced_containers: - experiment = container.experiment - - assert experiment is not None # mypy fix as experiment is present for async containers - - logger.info("- %s%s", get_qualified_name(experiment), trashed(experiment)) - - -def trashed(cont: ApiExperiment) -> str: - return " (Trashed)" if cont.trashed else "" diff --git a/src/neptune/cli/sync.py b/src/neptune/cli/sync.py deleted file mode 100644 index 906b44cdc..000000000 --- a/src/neptune/cli/sync.py +++ /dev/null @@ -1,155 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -__all__ = ["SyncRunner"] - -from pathlib import Path -from typing import ( - TYPE_CHECKING, - List, - Optional, - Sequence, -) - -from neptune.cli.collect import collect_containers -from neptune.cli.utils import ( - get_metadata_container, - get_project, -) -from neptune.constants import OFFLINE_NAME_PREFIX -from neptune.exceptions import CannotSynchronizeOfflineRunsWithoutProject -from neptune.internal.id_formats import ( - QualifiedName, - UniqueId, -) -from neptune.internal.utils.logger import get_logger - -if TYPE_CHECKING: - from neptune.cli.containers import ( - AsyncContainer, - OfflineContainer, - ) - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -logger = get_logger(with_prefix=False) - - -class SyncRunner: - @staticmethod - def sync_all_offline(*, backend: "NeptuneBackend", base_path: Path, project_name: Optional[str] = None) -> None: - containers = collect_containers(path=base_path, backend=backend) - - project = get_project(project_name_flag=QualifiedName(project_name) if project_name else None, backend=backend) - if not project: - raise CannotSynchronizeOfflineRunsWithoutProject - - for container in containers.offline_containers: - container.sync(base_path=base_path, backend=backend, project=project) - - @staticmethod - def sync_all(*, backend: "NeptuneBackend", base_path: Path, project_name: Optional[str] = None) -> None: - containers = collect_containers(path=base_path, backend=backend) - - if containers.unsynced_containers: - for async_container in containers.unsynced_containers: - async_container.sync(base_path=base_path, backend=backend, project=None) - - if containers.offline_containers: - project = get_project( - project_name_flag=QualifiedName(project_name) if project_name else None, backend=backend - ) - if not project: - raise CannotSynchronizeOfflineRunsWithoutProject - - for offline_container in containers.offline_containers: - offline_container.sync(base_path=base_path, backend=backend, project=project) - - @staticmethod - def sync_selected( - *, backend: "NeptuneBackend", base_path: Path, project_name: Optional[str] = None, object_names: Sequence[str] - ) -> None: - containers = collect_containers(path=base_path, backend=backend) - async_selected = [QualifiedName(name) for name in object_names if not name.startswith(OFFLINE_NAME_PREFIX)] - - if async_selected: - sync_selected_async( - backend=backend, - base_path=base_path, - container_names=async_selected, - containers=containers.async_containers, - ) - - offline_selected = [ - UniqueId(name[len(OFFLINE_NAME_PREFIX) :]) for name in object_names if name.startswith(OFFLINE_NAME_PREFIX) - ] - if offline_selected: - sync_selected_offline( - backend=backend, - base_path=base_path, - container_names=offline_selected, - containers=containers.offline_containers, - project_name=project_name, - ) - - -def sync_selected_async( - *, - backend: "NeptuneBackend", - base_path: Path, - container_names: List["QualifiedName"], - containers: List["AsyncContainer"], -) -> None: - async_containers_ids = set() - for container_name in container_names: - experiment = get_metadata_container( - backend=backend, - container_id=container_name, - ) - if experiment: - async_containers_ids.add(experiment.id) - else: - logger.error(f"Container {container_name} not found") - - selected_async_containers = [x for x in containers if x.container_id in async_containers_ids] - - for container in selected_async_containers: - container.sync(base_path=base_path, backend=backend, project=None) - - -def 
sync_selected_offline( - *, - backend: "NeptuneBackend", - base_path: Path, - container_names: List["UniqueId"], - containers: List["OfflineContainer"], - project_name: Optional[str] = None, -) -> None: - project = get_project(project_name_flag=QualifiedName(project_name) if project_name else None, backend=backend) - if not project: - raise CannotSynchronizeOfflineRunsWithoutProject - - selected_offline_containers: List["OfflineContainer"] = [] - for container_id in container_names: - found_container = next((x for x in containers if x.container_id == container_id), None) - - if found_container: - selected_offline_containers.append(found_container) - else: - logger.warning("Offline container %s not found on disk.", container_id) - - for container in selected_offline_containers: - container.sync(base_path=base_path, backend=backend, project=project) diff --git a/src/neptune/cli/utils.py b/src/neptune/cli/utils.py deleted file mode 100644 index 26128b7f6..000000000 --- a/src/neptune/cli/utils.py +++ /dev/null @@ -1,144 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = [ - "get_metadata_container", - "get_project", - "get_qualified_name", - "is_single_execution_dir_synced", - "detect_offline_dir", - "detect_async_dir", -] - -import os -import textwrap -import threading -from pathlib import Path -from typing import ( - Any, - Callable, - Dict, - Optional, - Tuple, - Union, -) - -from neptune.core.components.queue.disk_queue import DiskQueue -from neptune.envs import PROJECT_ENV_NAME -from neptune.exceptions import ( - MetadataContainerNotFound, - ProjectNotFound, -) -from neptune.internal.backends.api_model import ( - ApiExperiment, - Project, -) -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneException -from neptune.internal.id_formats import ( - QualifiedName, - UniqueId, -) -from neptune.internal.operation import Operation -from neptune.internal.utils.logger import get_logger -from neptune.objects.structure_version import StructureVersion - -logger = get_logger(with_prefix=False) - - -def get_metadata_container( - backend: NeptuneBackend, - container_id: Union[UniqueId, QualifiedName], - container_type: Optional[ContainerType] = None, -) -> Optional[ApiExperiment]: - public_container_type = container_type or "object" - try: - return backend.get_metadata_container(container_id=container_id, expected_container_type=container_type) - except MetadataContainerNotFound: - logger.warning("Can't fetch %s %s. Skipping.", public_container_type, container_id) - except NeptuneException as e: - logger.warning("Exception while fetching %s %s. Skipping.", public_container_type, container_id) - logger.exception(e) - - return None - - -_project_name_missing_message = ( - "Project name not provided. Could not synchronize offline runs." 
- " To synchronize an offline run, specify the project name with the --project flag" - f" or by setting the {PROJECT_ENV_NAME} environment variable." -) - - -def _project_not_found_message(project_name: QualifiedName) -> str: - return ( - f"Project {project_name} not found. Could not synchronize offline runs." - " Please ensure you specified the correct project name with the --project flag" - f" or with the {PROJECT_ENV_NAME} environment variable, or contact Neptune for support." - ) - - -def get_project(backend: NeptuneBackend, project_name_flag: Optional[QualifiedName] = None) -> Optional[Project]: - project_name: Optional[QualifiedName] = project_name_flag - if project_name_flag is None: - project_name_from_env = os.getenv(PROJECT_ENV_NAME) - if project_name_from_env is not None: - project_name = QualifiedName(project_name_from_env) - - if not project_name: - logger.warning(textwrap.fill(_project_name_missing_message)) - return None - try: - return backend.get_project(project_name) - except ProjectNotFound: - logger.warning(textwrap.fill(_project_not_found_message(project_name))) - return None - - -def get_qualified_name(experiment: ApiExperiment) -> QualifiedName: - return QualifiedName(f"{experiment.workspace}/{experiment.project_name}/{experiment.sys_id}") - - -def is_single_execution_dir_synced(execution_path: Path) -> bool: - serializer: Callable[[Operation], Dict[str, Any]] = lambda op: op.to_dict() - - with DiskQueue(execution_path, serializer, Operation.from_dict, threading.RLock()) as disk_queue: - is_queue_empty: bool = disk_queue.is_empty() - - return is_queue_empty - - -def detect_async_dir(dir_name: str) -> Tuple[ContainerType, UniqueId, StructureVersion]: - parts = dir_name.split("__") - if len(parts) == 1: - return ContainerType.RUN, UniqueId(dir_name), StructureVersion.LEGACY - elif len(parts) == 2: - return ContainerType(parts[0]), UniqueId(parts[1]), StructureVersion.CHILD_EXECUTION_DIRECTORIES - elif len(parts) == 4 or len(parts) == 5: - return ContainerType(parts[0]), UniqueId(parts[1]), StructureVersion.DIRECT_DIRECTORY - else: - raise ValueError(f"Wrong dir format: {dir_name}") - - -def detect_offline_dir(dir_name: str) -> Tuple[ContainerType, UniqueId, StructureVersion]: - parts = dir_name.split("__") - if len(parts) == 1: - return ContainerType.RUN, UniqueId(dir_name), StructureVersion.DIRECT_DIRECTORY - elif len(parts) == 2 or len(parts) == 4: - return ContainerType(parts[0]), UniqueId(parts[1]), StructureVersion.DIRECT_DIRECTORY - else: - raise ValueError(f"Wrong dir format: {dir_name}") diff --git a/src/neptune/constants.py b/src/neptune/constants.py deleted file mode 100644 index e848efc41..000000000 --- a/src/neptune/constants.py +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
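For reference, the directory-name helpers removed above encode the container type, ID, and storage-layout version in `__`-separated segments. A dependency-free rendering of the three shapes `detect_async_dir` accepts (plain strings stand in for the `ContainerType` and `StructureVersion` enums; the IDs are made up):

from typing import Tuple

def detect_async_dir(dir_name: str) -> Tuple[str, str, str]:
    parts = dir_name.split("__")
    if len(parts) == 1:
        return "run", parts[0], "LEGACY"
    elif len(parts) == 2:
        return parts[0], parts[1], "CHILD_EXECUTION_DIRECTORIES"
    elif len(parts) in (4, 5):
        return parts[0], parts[1], "DIRECT_DIRECTORY"
    raise ValueError(f"Wrong dir format: {dir_name}")

print(detect_async_dir("9f036df0"))             # bare ID: legacy layout
print(detect_async_dir("run__9f036df0"))        # type__id: child execution directories
print(detect_async_dir("run__9f036df0__1__0"))  # four or five segments: direct directory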
-# -__all__ = [ - "ANONYMOUS_API_TOKEN", - "NEPTUNE_DATA_DIRECTORY", - "OFFLINE_DIRECTORY", - "ASYNC_DIRECTORY", - "SYNC_DIRECTORY", - "OFFLINE_NAME_PREFIX", - "MAX_32_BIT_INT", - "MIN_32_BIT_INT", -] - -"""Constants used by Neptune""" - - -ANONYMOUS_API_TOKEN = "ANONYMOUS" - -NEPTUNE_DATA_DIRECTORY = ".neptune" - -OFFLINE_DIRECTORY = "offline" -ASYNC_DIRECTORY = "async" -SYNC_DIRECTORY = "sync" - -OFFLINE_NAME_PREFIX = "offline/" - -MAX_32_BIT_INT = 2147483647 -MIN_32_BIT_INT = -2147483648 diff --git a/src/neptune/core/__init__.py b/src/neptune/core/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/core/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/core/components/__init__.py b/src/neptune/core/components/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/core/components/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/core/components/abstract.py b/src/neptune/core/components/abstract.py deleted file mode 100644 index 948d71907..000000000 --- a/src/neptune/core/components/abstract.py +++ /dev/null @@ -1,77 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from abc import ( - ABC, - abstractmethod, -) -from pathlib import Path -from types import TracebackType -from typing import ( - Optional, - Tuple, - Type, -) - - -class AutoCloseable(ABC): - def __enter__(self) -> AutoCloseable: - return self - - @abstractmethod - def close(self) -> None: ... - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - self.close() - - -class Resource(AutoCloseable): - @abstractmethod - def cleanup(self) -> None: ... 
- - def flush(self) -> None: - pass - - def close(self) -> None: - self.flush() - - @property - @abstractmethod - def data_path(self) -> Path: ... - - -class WithResources(Resource): - @property - @abstractmethod - def resources(self) -> Tuple[Resource, ...]: ... - - def flush(self) -> None: - for resource in self.resources: - resource.flush() - - def close(self) -> None: - for resource in self.resources: - resource.close() - - def cleanup(self) -> None: - for resource in self.resources: - resource.cleanup() diff --git a/src/neptune/core/components/metadata_file.py b/src/neptune/core/components/metadata_file.py deleted file mode 100644 index 41693bfc4..000000000 --- a/src/neptune/core/components/metadata_file.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["MetadataFile"] - -import os -from json import ( - JSONDecodeError, - dump, - load, -) -from pathlib import Path -from typing import ( - Any, - Dict, - Optional, -) - -from neptune.core.components.abstract import Resource - -METADATA_FILE: str = "metadata.json" - - -class MetadataFile(Resource): - def __init__(self, data_path: Path, metadata: Optional[Dict[str, Any]] = None): - self._data_path = data_path - self._metadata_path: Path = (data_path / METADATA_FILE).resolve(strict=False) - self._data: Dict[str, Any] = self._read_or_default() - - if metadata: - for key, value in metadata.items(): - self.__setitem__(key, value) - self.flush() - - @property - def data_path(self) -> Path: - return self._data_path - - def __getitem__(self, item: str) -> Any: - return self._data[item] - - def __setitem__(self, key: str, value: Any) -> None: - self._data[key] = value - - def flush(self) -> None: - with open(self._metadata_path, "w") as handler: - dump(self._data, handler, indent=2) - - def _read_or_default(self) -> Dict[str, Any]: - if self._metadata_path.exists(): - try: - with open(self._metadata_path, "r") as handler: - data: Dict[str, Any] = load(handler) - return data - except (OSError, JSONDecodeError): - pass - - return dict() - - def cleanup(self) -> None: - try: - os.remove(self._metadata_path) - except OSError: - pass diff --git a/src/neptune/core/components/operation_storage.py b/src/neptune/core/components/operation_storage.py deleted file mode 100644 index 732f281a9..000000000 --- a/src/neptune/core/components/operation_storage.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["OperationStorage"] - -import os -import shutil -from pathlib import Path - -from neptune.core.components.abstract import Resource - -UPLOAD_PATH: str = "upload_path" - - -class OperationStorage(Resource): - def __init__(self, data_path: Path): - self._data_path = data_path - - # initialize upload directory - os.makedirs(self.upload_path, exist_ok=True) - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def upload_path(self) -> Path: - return self._data_path / UPLOAD_PATH - - def cleanup(self) -> None: - shutil.rmtree(self.upload_path, ignore_errors=True) diff --git a/src/neptune/core/components/queue/__init__.py b/src/neptune/core/components/queue/__init__.py deleted file mode 100644 index 665b8500e..000000000 --- a/src/neptune/core/components/queue/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/core/components/queue/disk_queue.py b/src/neptune/core/components/queue/disk_queue.py deleted file mode 100644 index 857857cb9..000000000 --- a/src/neptune/core/components/queue/disk_queue.py +++ /dev/null @@ -1,265 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
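For reference, the `OperationStorage` removed above reduces to directory bookkeeping: an `upload_path` subdirectory is created eagerly on construction and removed recursively on `cleanup()`. The same two steps, inlined against a temporary directory:

import os
import shutil
import tempfile
from pathlib import Path

data_path = Path(tempfile.mkdtemp())     # stand-in for a container's data directory
upload_path = data_path / "upload_path"  # the UPLOAD_PATH constant above

os.makedirs(upload_path, exist_ok=True)  # what __init__ does
print(upload_path.is_dir())              # True

shutil.rmtree(upload_path, ignore_errors=True)  # what cleanup() does
print(upload_path.exists())              # False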
-# -__all__ = ["QueueElement", "DiskQueue"] - -import json -import os -import threading -from collections import deque -from dataclasses import dataclass -from glob import glob -from pathlib import Path -from time import time -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Callable, - Deque, - Generic, - List, - Optional, - Tuple, - Type, - TypeVar, -) - -from neptune.core.components.abstract import WithResources -from neptune.core.components.queue.json_file_splitter import JsonFileSplitter -from neptune.core.components.queue.log_file import LogFile -from neptune.core.components.queue.sync_offset_file import SyncOffsetFile -from neptune.exceptions import MalformedOperation -from neptune.internal.utils.logger import get_logger - -if TYPE_CHECKING: - from neptune.core.components.abstract import Resource - - -T = TypeVar("T") -Timestamp = float - -_logger = get_logger() - - -DEFAULT_MAX_BATCH_SIZE_BYTES = 100 * 1024**2 - - -@dataclass -class QueueElement(Generic[T]): - obj: T - ver: int - size: int - at: Optional[Timestamp] = None - - -# NOTICE: This class is thread-safe as long as there is only one consumer and one producer. -class DiskQueue(WithResources, Generic[T]): - def __init__( - self, - data_path: Path, - to_dict: Callable[[T], dict], - from_dict: Callable[[dict], T], - lock: threading.RLock, - max_file_size: int = 64 * 1024**2, - max_batch_size_bytes: Optional[int] = None, - extension: str = "log", - ) -> None: - self._data_path: Path = data_path.resolve() - self._to_dict: Callable[[T], dict] = to_dict - self._from_dict: Callable[[dict], T] = from_dict - self._max_file_size: int = max_file_size - self._max_batch_size_bytes: int = max_batch_size_bytes or int( - os.environ.get("NEPTUNE_MAX_BATCH_SIZE_BYTES") or str(DEFAULT_MAX_BATCH_SIZE_BYTES) - ) - self._extension: str = extension - - self._last_ack_file = SyncOffsetFile(data_path / "last_ack_version", default=0) - self._last_put_file = SyncOffsetFile(data_path / "last_put_version", default=0) - - self._log_files: Deque[LogFile] = get_all_log_files(data_path, extension) - self._write_file_version: int = self._log_files[-1].min_version - self._writer = self._log_files[-1] - self._read_file_version: int = self._log_files[0].min_version - self._reader = JsonFileSplitter(self._log_files[0].file_path) - - self._should_skip_to_ack = True - - self._empty_cond = threading.Condition(lock) - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def resources(self) -> Tuple["Resource", ...]: - return ( - self._last_put_file, - self._last_ack_file, - ) + tuple(self._log_files) - - def put(self, obj: T) -> int: - version = self._last_put_file.read_local() + 1 - serialized_obj = json.dumps(self._serialize(obj=obj, version=version, at=time())) - - self._create_new_writer_if_file_size_exceeded(len(serialized_obj), version) - - self._writer.write(serialized_obj) - self._last_put_file.write(version) - - return version - - def get(self) -> Optional[QueueElement[T]]: - if self._should_skip_to_ack: - return self._skip_and_get() - else: - return self._get() - - def _skip_and_get(self) -> Optional[QueueElement[T]]: - ack_version = self._last_ack_file.read_local() - while True: - top_element = self._get() - if top_element is None: - return None - if top_element.ver > ack_version: - self._should_skip_to_ack = False - if top_element.ver > ack_version + 1: - _logger.warning( - "Possible data loss. 
Last acknowledged operation version: %d, next: %d", - ack_version, - top_element.ver, - ) - return top_element - - def _get(self) -> Optional[QueueElement[T]]: - _json, size = self._reader.get_with_size() - if not _json: - if self._read_file_version >= self._write_file_version: - return None - - self._reader.close() - for log_file in self._log_files: - if log_file.min_version > self._read_file_version: - self._read_file_version = log_file.min_version - self._reader = JsonFileSplitter(log_file.file_path) - break - - # It is safe. Max recursion level is 2. - return self._get() - try: - obj, ver, at = self._deserialize(_json) - return QueueElement[T](obj, ver, size, at) - except Exception as e: - raise MalformedOperation from e - - def get_batch(self, size: int) -> List[QueueElement[T]]: - if self._should_skip_to_ack: - first = self._skip_and_get() - else: - first = self._get() - if not first: - return [] - - ret = [first] - cur_batch_size = first.size - for _ in range(0, size - 1): - if cur_batch_size >= self._max_batch_size_bytes: - break - next_obj = self._get() - if not next_obj: - break - - cur_batch_size += next_obj.size - ret.append(next_obj) - return ret - - def wait_for_empty(self, seconds: Optional[float] = None) -> bool: - with self._empty_cond: - return self._empty_cond.wait_for(self.is_empty, timeout=seconds) - - def ack(self, version: int) -> None: - self._last_ack_file.write(version) - self._clean_log_files_up_to(version) - - with self._empty_cond: - if self.is_empty(): - self._empty_cond.notify_all() - - def _create_new_writer_if_file_size_exceeded(self, size: int, version: int) -> None: - if self._writer.file_size + size > self._max_file_size: - old_writer = self._writer - self._writer = LogFile(self._data_path, version, extension=self._extension) - old_writer.flush() - old_writer.close() - self._write_file_version = version - self._log_files.append(self._writer) - - def _clean_log_files_up_to(self, version: int) -> None: - log_versions = [log.min_version for log in self._log_files] - - for current_min_version, next_min_version in zip(log_versions, log_versions[1:]): - if next_min_version <= version: - self._log_files.popleft().cleanup() - - def is_empty(self) -> bool: - return self.size() == 0 - - def size(self) -> int: - return self._last_put_file.read_local() - self._last_ack_file.read_local() - - def _serialize(self, obj: T, version: int, at: Optional[Timestamp] = None) -> dict: - return {"obj": self._to_dict(obj), "version": version, "at": at} - - def _deserialize(self, data: dict) -> Tuple[T, int, Optional[Timestamp]]: - return self._from_dict(data["obj"]), data["version"], data.get("at") - - def close(self) -> None: - self._reader.close() - super().close() - - def __enter__(self) -> "DiskQueue[T]": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - self.flush() - self.close() - if self.is_empty(): - self.cleanup() - - -def get_all_log_files(data_path: Path, extension: str) -> Deque[LogFile]: - local_data_files = glob(f"{data_path}/data-*.{extension}") - - if not local_data_files: - return deque([LogFile(data_path, 1, extension=extension)]) - - sorted_local_data_files = sorted( - local_data_files, key=lambda file_path: extract_version_from_file_name(Path(file_path), extension) - ) - - return deque( - [ - LogFile(data_path, extract_version_from_file_name(Path(file_path), extension), extension=extension) - for file_path in sorted_local_data_files - ] 
-    )
-
-
-def extract_version_from_file_name(file_path: Path, extension: str) -> int:
-    return int(file_path.name.split("-")[-1][: -len(extension) - 1])
diff --git a/src/neptune/core/components/queue/json_file_splitter.py b/src/neptune/core/components/queue/json_file_splitter.py
deleted file mode 100644
index fb6fdc322..000000000
--- a/src/neptune/core/components/queue/json_file_splitter.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-__all__ = ["JsonFileSplitter"]
-
-from collections import deque
-from io import StringIO
-from json import (
-    JSONDecodeError,
-    JSONDecoder,
-)
-from pathlib import Path
-from types import TracebackType
-from typing import (
-    IO,
-    Any,
-    Deque,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-)
-
-
-class JsonFileSplitter:
-    BUFFER_SIZE = 64 * 1024
-    MAX_PART_READ = 8 * 1024
-
-    def __init__(self, file_path: Union[str, Path]):
-        self._file: IO = open(file_path, "r")
-        self._decoder: JSONDecoder = JSONDecoder(strict=False)
-        self._part_buffer: StringIO = StringIO()
-        self._parsed_queue: Deque[Tuple[Any, int]] = deque()
-        self._start_pos: int = 0
-
-    def close(self) -> None:
-        if not self._file.closed:
-            self._file.close()
-        if not self._part_buffer.closed:
-            self._part_buffer.close()
-
-    def get(self) -> Optional[dict]:
-        return (self.get_with_size() or (None, None))[0]
-
-    def get_with_size(self) -> Tuple[Optional[dict], int]:
-        if self._parsed_queue:
-            return self._parsed_queue.popleft()
-        self._read_data()
-        if self._parsed_queue:
-            return self._parsed_queue.popleft()
-        return None, 0
-
-    def _read_data(self) -> None:
-        if self._part_buffer.tell() < self.MAX_PART_READ:
-            data = self._file.read(self.BUFFER_SIZE)
-            if not data:
-                return
-            if self._part_buffer.tell() > 0:
-                data = self._reset_part_buffer() + data
-            self._decode(data)
-
-        if not self._parsed_queue:
-            data = self._file.read(self.BUFFER_SIZE)
-            while data:
-                self._part_buffer.write(data)
-                data = self._file.read(self.BUFFER_SIZE)
-            data = self._reset_part_buffer()
-            self._decode(data)
-
-    def _decode(self, data: str) -> None:
-        start = self._json_start(data)
-        while start is not None:
-            try:
-                json_data, new_start = self._decoder.raw_decode(data, start)
-                size = new_start - start
-                start = new_start
-            except JSONDecodeError:
-                self._part_buffer.write(data[start:])
-                break
-            else:
-                self._parsed_queue.append((json_data, size))
-                start = self._json_start(data, start)
-
-    @staticmethod
-    def _json_start(data: str, start: int = 0) -> Optional[int]:
-        try:
-            return data.index("{", start)
-        except ValueError:
-            return None
-
-    def _reset_part_buffer(self) -> str:
-        data = self._part_buffer.getvalue()
-        self._part_buffer.close()
-        self._part_buffer = StringIO()
-        return data
-
-    def __enter__(self) -> "JsonFileSplitter":
-        return self
-
-    def __exit__(
-        self,
-        exc_type: Optional[Type[BaseException]],
-        exc_value: Optional[BaseException],
-        traceback: Optional[TracebackType],
-    ) -> None:
-        self.close()
diff --git
a/src/neptune/core/components/queue/log_file.py b/src/neptune/core/components/queue/log_file.py deleted file mode 100644 index e97bc0b7a..000000000 --- a/src/neptune/core/components/queue/log_file.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pathlib import Path - -from neptune.core.components.abstract import Resource -from neptune.internal.utils.logger import get_logger - -logger = get_logger() - - -class LogFile(Resource): - def __init__(self, data_path: Path, min_version: int, extension: str = "log") -> None: - self._data_path: Path = data_path - self._min_version: int = min_version - self._extension: str = extension - - self._file_size: int = 0 - if (data_path / f"data-{min_version}.{extension}").exists(): - self._file_size = self.file_path.stat().st_size - - self._writer = open(self.file_path, "a") - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def min_version(self) -> int: - return self._min_version - - @property - def file_size(self) -> int: - return self._file_size - - @property - def file_name(self) -> str: - return f"data-{self._min_version}.{self._extension}" - - @property - def file_path(self) -> Path: - return self._data_path / self.file_name - - def write(self, data: str) -> None: - self._writer.write(data + "\n") - self._file_size += len(data) + 1 - - def cleanup(self) -> None: - self.close() - try: - self.file_path.unlink() - except FileNotFoundError: - pass - except Exception: - logger.exception("Cannot remove queue file %s", self.file_name) - - def flush(self) -> None: - if not self._writer.closed: - self._writer.flush() - - def close(self) -> None: - if not self._writer.closed: - self._writer.close() diff --git a/src/neptune/core/components/queue/sync_offset_file.py b/src/neptune/core/components/queue/sync_offset_file.py deleted file mode 100644 index 39ddbf7ac..000000000 --- a/src/neptune/core/components/queue/sync_offset_file.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
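For reference, queue segments created by the `LogFile` class removed above are named `data-<min_version>.<extension>`, and `DiskQueue` recovers the version by slicing the extension off, as in `extract_version_from_file_name`. A quick round-trip check of that naming convention:

from pathlib import Path

def extract_version(file_path: Path, extension: str = "log") -> int:
    # Same slicing as extract_version_from_file_name above:
    # "data-137.log" -> "137.log" -> "137" -> 137
    return int(file_path.name.split("-")[-1][: -len(extension) - 1])

assert extract_version(Path("data-1.log")) == 1
assert extract_version(Path("data-137.log")) == 137
print("data-<version>.<extension> round-trips")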
-# -__all__ = ["SyncOffsetFile"] - -import os -from pathlib import Path -from typing import IO - -from neptune.core.components.abstract import Resource - - -class SyncOffsetFile(Resource): - def __init__(self, path: Path, default: int = 0): - self._path = path - mode = "r+" if path.exists() else "w+" - self._file: IO = open(self._path, mode) - self._default: int = default - self._last: int = self.read() - - @property - def data_path(self) -> Path: - return self._path.parent - - def write(self, offset: int) -> None: - self._file.seek(0) - self._file.write(str(offset)) - self._file.truncate() - self._file.flush() - self._last = offset - - def read(self) -> int: - self._file.seek(0) - content = self._file.read() - if not content: - return self._default - return int(content) - - def read_local(self) -> int: - return self._last - - def flush(self) -> None: - self._file.flush() - - def close(self) -> None: - self._file.close() - - def cleanup(self) -> None: - try: - os.remove(self._path) - except OSError: - pass diff --git a/src/neptune/envs.py b/src/neptune/envs.py deleted file mode 100644 index 785aab75e..000000000 --- a/src/neptune/envs.py +++ /dev/null @@ -1,81 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
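For reference, the `SyncOffsetFile` removed above persists a single integer as text and truncates on every write; without the truncate, a shorter value written over a longer one would leave stale digits behind. A self-contained sketch of that failure mode and the fix (the temporary file is illustrative):

import tempfile

with tempfile.NamedTemporaryFile("w+", suffix=".offset") as f:
    for offset in (100, 7):  # write 100 first, then the shorter 7
        f.seek(0)
        f.write(str(offset))
        f.truncate()         # drop leftover bytes; without this the file would read "700"
        f.flush()
    f.seek(0)
    print(f.read())          # "7"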
-# -__all__ = [ - "API_TOKEN_ENV_NAME", - "CONNECTION_MODE", - "PROJECT_ENV_NAME", - "CUSTOM_RUN_ID_ENV_NAME", - "MONITORING_NAMESPACE", - "NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE", - "NEPTUNE_NOTEBOOK_ID", - "NEPTUNE_NOTEBOOK_PATH", - "NEPTUNE_RETRIES_TIMEOUT_ENV", - "NEPTUNE_SYNC_BATCH_TIMEOUT_ENV", - "NEPTUNE_SUBPROCESS_KILL_TIMEOUT", - "NEPTUNE_FETCH_TABLE_STEP_SIZE", - "NEPTUNE_SYNC_AFTER_STOP_TIMEOUT", - "NEPTUNE_REQUEST_TIMEOUT", - "NEPTUNE_MAX_DISK_USAGE", - "NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED", - "NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK", - "NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK", - "NEPTUNE_USE_PROTOCOL_BUFFERS", - "NEPTUNE_ASYNC_BATCH_SIZE", - "S3_ENDPOINT_URL", -] - -from neptune.internal.envs import ( - API_TOKEN_ENV_NAME, - NEPTUNE_RETRIES_TIMEOUT_ENV, -) - -CONNECTION_MODE = "NEPTUNE_MODE" - -PROJECT_ENV_NAME = "NEPTUNE_PROJECT" - -CUSTOM_RUN_ID_ENV_NAME = "NEPTUNE_CUSTOM_RUN_ID" - -MONITORING_NAMESPACE = "NEPTUNE_MONITORING_NAMESPACE" - -NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE = "NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE" - -NEPTUNE_NOTEBOOK_ID = "NEPTUNE_NOTEBOOK_ID" - -NEPTUNE_NOTEBOOK_PATH = "NEPTUNE_NOTEBOOK_PATH" - -NEPTUNE_SYNC_BATCH_TIMEOUT_ENV = "NEPTUNE_SYNC_BATCH_TIMEOUT" - -NEPTUNE_SUBPROCESS_KILL_TIMEOUT = "NEPTUNE_SUBPROCESS_KILL_TIMEOUT" - -NEPTUNE_FETCH_TABLE_STEP_SIZE = "NEPTUNE_FETCH_TABLE_STEP_SIZE" - -NEPTUNE_SYNC_AFTER_STOP_TIMEOUT = "NEPTUNE_SYNC_AFTER_STOP_TIMEOUT" - -NEPTUNE_REQUEST_TIMEOUT = "NEPTUNE_REQUEST_TIMEOUT" - -NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK = "NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK" - -NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK = "NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK" - -NEPTUNE_MAX_DISK_USAGE = "NEPTUNE_MAX_DISK_USAGE" - -NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED = "NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED" - -NEPTUNE_ASYNC_BATCH_SIZE = "NEPTUNE_ASYNC_BATCH_SIZE" - -NEPTUNE_USE_PROTOCOL_BUFFERS = "NEPTUNE_USE_PROTOCOL_BUFFERS" - -S3_ENDPOINT_URL = "S3_ENDPOINT_URL" diff --git a/src/neptune/exceptions.py b/src/neptune/exceptions.py deleted file mode 100644 index f65374282..000000000 --- a/src/neptune/exceptions.py +++ /dev/null @@ -1,1179 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
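For reference, the constants removed above are only environment-variable names; resolution happens at the call sites, such as the removed `get_project` helper, which falls back from the `--project` flag to `NEPTUNE_PROJECT`. A minimal sketch of that flag-then-environment order (the project names are placeholders):

import os
from typing import Optional

PROJECT_ENV_NAME = "NEPTUNE_PROJECT"

def resolve_project(project_flag: Optional[str]) -> Optional[str]:
    if project_flag is not None:  # an explicit flag always wins
        return project_flag
    return os.getenv(PROJECT_ENV_NAME)  # may still be None; the caller then warns

os.environ[PROJECT_ENV_NAME] = "my-workspace/my-project"
print(resolve_project(None))        # falls back to the environment
print(resolve_project("ws/other"))  # the flag takes precedence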
-# -__all__ = [ - "InternalClientError", - "NeptuneException", - "NeptuneInvalidApiTokenException", - "NeptuneApiException", - "MetadataInconsistency", - "MissingFieldException", - "TypeDoesNotSupportAttributeException", - "MalformedOperation", - "FileNotFound", - "FileUploadError", - "FileSetUploadError", - "FileSetNotFound", - "ClientHttpError", - "MetadataContainerNotFound", - "ProjectNotFound", - "RunNotFound", - "ModelNotFound", - "ModelVersionNotFound", - "ExceptionWithProjectsWorkspacesListing", - "ContainerUUIDNotFound", - "RunUUIDNotFound", - "ProjectNotFoundWithSuggestions", - "AmbiguousProjectName", - "NeptuneMissingProjectNameException", - "InactiveContainerException", - "InactiveRunException", - "InactiveModelException", - "InactiveModelVersionException", - "InactiveProjectException", - "NeptuneMissingApiTokenException", - "CannotSynchronizeOfflineRunsWithoutProject", - "NeedExistingExperimentForReadOnlyMode", - "NeedExistingRunForReadOnlyMode", - "NeedExistingModelForReadOnlyMode", - "NeedExistingModelVersionForReadOnlyMode", - "NeptuneParametersCollision", - "NeptuneWrongInitParametersException", - "NeptuneRunResumeAndCustomIdCollision", - "NeptuneClientUpgradeRequiredError", - "NeptuneMissingRequiredInitParameter", - "CannotResolveHostname", - "NeptuneSSLVerificationError", - "NeptuneConnectionLostException", - "InternalServerError", - "Unauthorized", - "Forbidden", - "NeptuneOfflineModeException", - "NeptuneOfflineModeFetchException", - "NeptuneOfflineModeChangeStageException", - "NeptuneProtectedPathException", - "NeptuneCannotChangeStageManually", - "OperationNotSupported", - "NeptuneMissingRequirementException", - "NeptuneLimitExceedException", - "NeptuneFieldCountLimitExceedException", - "NeptuneStorageLimitException", - "FetchAttributeNotFoundException", - "ArtifactNotFoundException", - "PlotlyIncompatibilityException", - "NeptuneUnhandledArtifactSchemeException", - "NeptuneUnhandledArtifactTypeException", - "NeptuneLocalStorageAccessException", - "NeptuneRemoteStorageCredentialsException", - "NeptuneRemoteStorageAccessException", - "ArtifactUploadingError", - "NeptuneUnsupportedArtifactFunctionalityException", - "NeptuneEmptyLocationException", - "NeptuneFeatureNotAvailableException", - "NeptuneObjectCreationConflict", - "NeptuneModelKeyAlreadyExistsError", - "NeptuneSynchronizationAlreadyStoppedException", - "StreamAlreadyUsedException", - "NeptuneUserApiInputException", - "NeptuneMaxDiskUtilizationExceeded", - "NeptuneInvalidQueryException", - "NeptuneUnsupportedFunctionalityException", -] - -from typing import ( - List, - Optional, - Union, -) -from urllib.parse import urlparse - -from packaging.version import Version - -from neptune.envs import ( - CUSTOM_RUN_ID_ENV_NAME, - PROJECT_ENV_NAME, -) -from neptune.internal.backends.api_model import ( - Project, - Workspace, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.envs import API_TOKEN_ENV_NAME -from neptune.internal.exceptions import ( - STYLES, - ClientHttpError, - Forbidden, - InternalClientError, - InternalServerError, - NeptuneApiException, - NeptuneConnectionLostException, - NeptuneException, - NeptuneInvalidApiTokenException, - NeptuneSSLVerificationError, - Unauthorized, -) -from neptune.internal.id_formats import QualifiedName -from neptune.internal.utils import replace_patch_version -from neptune.internal.utils.paths import path_to_str - - -class MetadataInconsistency(NeptuneException): - pass - - -class MissingFieldException(NeptuneException, AttributeError, 
KeyError):
-    """Raised when a get-like action is called on a `Handler` instead of on an `Attribute`."""
-
-    def __init__(self, field_path):
-        message = """
-{h1}
-----MissingFieldException-------------------------------------------------------
-{end}
-The field "{field_path}" was not found.
-
-There are two possible reasons:
-    - There is a typo in the path. Double-check your code for typos.
-    - You are fetching a field that another process created, but the local representation is not synchronized.
-    If you are sending metadata from multiple processes at the same time, synchronize the local representation before fetching values:
-    {python}run.sync(){end}
-
-{correct}Need help?{end}-> https://docs.neptune.ai/getting_help
-"""  # noqa: E501
-        self._msg = message.format(field_path=field_path, **STYLES)
-        super().__init__(self._msg)
-
-    def __str__(self):
-        # required because of overridden `__str__` in `KeyError`
-        return self._msg
-
-
-class TypeDoesNotSupportAttributeException(NeptuneException, AttributeError):
-    def __init__(self, type_, attribute):
-        message = """
-{h1}
-----TypeDoesNotSupportAttributeException----------------------------------------
-{end}
-{type} has no attribute {attribute}.
-
-{correct}Need help?{end}-> https://docs.neptune.ai/help/error_type_does_not_support_attribute/
-"""
-        self._msg = message.format(type=type_, attribute=attribute, **STYLES)
-        super().__init__(self._msg)
-
-    def __str__(self):
-        # required because of overridden `__str__` in `KeyError`
-        return self._msg
-
-
-class MalformedOperation(NeptuneException):
-    pass
-
-
-class FileNotFound(NeptuneException):
-    def __init__(self, file: str):
-        super().__init__("File not found: {}".format(file))
-
-
-class FileUploadError(NeptuneException):
-    def __init__(self, filename: str, msg: str):
-        super().__init__("Cannot upload file {}: {}".format(filename, msg))
-
-
-class FileSetUploadError(NeptuneException):
-    def __init__(self, globs: List[str], msg: str):
-        super().__init__("Cannot upload file set {}: {}".format(globs, msg))
-
-
-class MetadataContainerNotFound(NeptuneException):
-    container_id: str
-    container_type: Optional[ContainerType]
-
-    def __init__(self, container_id: str, container_type: Optional[ContainerType]):
-        self.container_id = container_id
-        self.container_type = container_type
-        container_type_str = container_type.value.capitalize() if container_type else "object"
-        super().__init__(f"No existing {container_type_str} was found at {container_id}.")
-
-    @classmethod
-    def of_container_type(cls, container_type: Optional[ContainerType], container_id: str):
-        if container_type is None:
-            return MetadataContainerNotFound(container_id=container_id, container_type=None)
-        elif container_type == ContainerType.PROJECT:
-            return ProjectNotFound(project_id=container_id)
-        elif container_type == ContainerType.RUN:
-            return RunNotFound(run_id=container_id)
-        elif container_type == ContainerType.MODEL:
-            return ModelNotFound(model_id=container_id)
-        elif container_type == ContainerType.MODEL_VERSION:
-            return ModelVersionNotFound(model_version_id=container_id)
-        else:
-            raise InternalClientError(f"Unexpected ContainerType: {container_type}")
-
-
-class ProjectNotFound(MetadataContainerNotFound):
-    def __init__(self, project_id: str):
-        super().__init__(container_id=project_id, container_type=ContainerType.PROJECT)
-
-
-class RunNotFound(MetadataContainerNotFound):
-    def __init__(self, run_id: str):
-        super().__init__(container_id=run_id, container_type=ContainerType.RUN)
-
-
-class ModelNotFound(MetadataContainerNotFound):
-    def __init__(self, model_id: str):
-        super().__init__(container_id=model_id, container_type=ContainerType.MODEL)
-
-
-class ModelVersionNotFound(MetadataContainerNotFound):
-    def __init__(self, model_version_id: str):
-        super().__init__(container_id=model_version_id, container_type=ContainerType.MODEL_VERSION)
-
-
-class ExceptionWithProjectsWorkspacesListing(NeptuneException):
-    def __init__(
-        self,
-        message: str,
-        available_projects: List[Project] = (),
-        available_workspaces: List[Workspace] = (),
-        **kwargs,
-    ):
-        available_projects_message = """
-Did you mean any of these?
-{projects}
-"""
-
-        available_workspaces_message = """
-You can check all of your projects on the Projects page:
-{workspaces_urls}
-"""
-
-        projects_formatted_list = "\n".join(
-            map(
-                lambda project: f"    - {project.workspace}/{project.name}",
-                available_projects,
-            )
-        )
-
-        workspaces_formatted_list = "\n".join(
-            map(
-                lambda workspace: f"    - https://app.neptune.ai/{workspace.name}/-/projects",
-                available_workspaces,
-            )
-        )
-
-        super().__init__(
-            message.format(
-                available_projects_message=(
-                    available_projects_message.format(projects=projects_formatted_list) if available_projects else ""
-                ),
-                available_workspaces_message=(
-                    available_workspaces_message.format(workspaces_urls=workspaces_formatted_list)
-                    if available_workspaces
-                    else ""
-                ),
-                **STYLES,
-                **kwargs,
-            )
-        )
-
-
-class ContainerUUIDNotFound(NeptuneException):
-    container_id: str
-    container_type: ContainerType
-
-    def __init__(self, container_id: str, container_type: ContainerType):
-        self.container_id = container_id
-        self.container_type = container_type
-        super().__init__(
-            "{} with ID {} not found. It may have been deleted. "
-            "You can use the 'neptune clear' command to delete junk objects from local storage.".format(
-                container_type.value.capitalize(), container_id
-            )
-        )
-
-
-# for backward compatibility
-RunUUIDNotFound = ContainerUUIDNotFound
-
-
-class ProjectNotFoundWithSuggestions(ExceptionWithProjectsWorkspacesListing, ProjectNotFound):
-    def __init__(
-        self,
-        project_id: QualifiedName,
-        available_projects: List[Project] = (),
-        available_workspaces: List[Workspace] = (),
-    ):
-        message = """
-{h1}
-----NeptuneProjectNotFoundException------------------------------------
-{end}
-We couldn't find project {fail}"{project}"{end}.
-{available_projects_message}{available_workspaces_message}
-You may want to check the following docs page:
-    - https://docs.neptune.ai/setup/creating_project/
-
-{correct}Need help?{end}-> https://docs.neptune.ai/getting_help
-"""
-        super().__init__(
-            message=message,
-            available_projects=available_projects,
-            available_workspaces=available_workspaces,
-            project=project_id,
-        )
-
-
-class AmbiguousProjectName(ExceptionWithProjectsWorkspacesListing):
-    def __init__(self, project_id: str, available_projects: List[Project] = ()):
-        message = """
-{h1}
-----NeptuneProjectNameCollisionException------------------------------------
-{end}
-Cannot resolve project {fail}"{project}"{end}. Name is ambiguous.
-{available_projects_message} -You may also want to check the following docs pages: - - https://docs.neptune.ai/setup/creating_project/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message=message, available_projects=available_projects, project=project_id) - - -class NeptuneMissingProjectNameException(ExceptionWithProjectsWorkspacesListing): - def __init__( - self, - available_projects: List[Project] = (), - available_workspaces: List[Workspace] = (), - ): - message = """ -{h1} -----NeptuneMissingProjectNameException---------------------------------------- -{end} -The Neptune client couldn't find your project name. -{available_projects_message}{available_workspaces_message} -There are two options to add it: - - specify it in your code - - set an environment variable in your operating system. - -{h2}CODE{end} -Pass it to the {bold}init_run(){end} function via the {bold}project{end} argument: - {python}neptune.init_run(project='WORKSPACE_NAME/PROJECT_NAME'){end} - -{h2}ENVIRONMENT VARIABLE{end} -or export or set an environment variable depending on your operating system: - - {correct}Linux/Unix{end} - In your terminal run: - {bash}export {env_project}=WORKSPACE_NAME/PROJECT_NAME{end} - - {correct}Windows{end} - In your CMD run: - {bash}set {env_project}=WORKSPACE_NAME/PROJECT_NAME{end} - -and skip the {bold}project{end} argument of the {bold}init_run(){end} function: - {python}neptune.init_run(){end} - -You may also want to check the following docs pages: - - https://docs.neptune.ai/setup/creating_project/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__( - message=message, - available_projects=available_projects, - available_workspaces=available_workspaces, - env_project=PROJECT_ENV_NAME, - ) - - -class InactiveContainerException(NeptuneException): - resume_info: str - - def __init__(self, container_type: ContainerType, label: str): - message = """ -{h1} -----{cls}---------------------------------------- -{end} -It seems you are trying to log metadata to (or fetch it from) a {container_type} that was stopped ({label}). - -Here's what you can do:{resume_info} - -You may also want to check the following docs pages: - - https://docs.neptune.ai/logging/to_existing_object/ - - https://docs.neptune.ai/usage/querying_metadata/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__( - message.format( - cls=self.__class__.__name__, - label=label, - container_type=container_type.value, - resume_info=self.resume_info, - **STYLES, - ) - ) - - -class InactiveRunException(InactiveContainerException): - resume_info = """ - - Resume the run to continue logging to it: - https://docs.neptune.ai/logging/to_existing_object/ - - Don't invoke `stop()` on a run that you want to access. If you want to stop monitoring only, - you can resume a run in read-only mode: - https://docs.neptune.ai/api/connection_modes/#read-only-mode""" - - def __init__(self, label: str): - super().__init__(label=label, container_type=ContainerType.RUN) - - -class InactiveModelException(InactiveContainerException): - resume_info = """ - - Resume the model to continue logging to it: - https://docs.neptune.ai/api/neptune/#init_model - - Don't invoke `stop()` on a model that you want to access. 
If you want to stop monitoring only, - you can resume a model in read-only mode: - https://docs.neptune.ai/api/connection_modes/#read-only-mode""" - - def __init__(self, label: str): - super().__init__(label=label, container_type=ContainerType.MODEL) - - -class InactiveModelVersionException(InactiveContainerException): - resume_info = """ - - Resume the model version to continue logging to it: - https://docs.neptune.ai/api/neptune/#init_model_version - - Don't invoke `stop()` on a model version that you want to access. If you want to stop monitoring only, - you can resume a model version in read-only mode: - https://docs.neptune.ai/api/connection_modes/#read-only-mode""" - - def __init__(self, label: str): - super().__init__(label=label, container_type=ContainerType.MODEL_VERSION) - - -class InactiveProjectException(InactiveContainerException): - resume_info = """ - - Resume the connection to the project to continue logging to it: - https://docs.neptune.ai/api/neptune/#init_project - - Don't invoke `stop()` on a project that you want to access.""" - - def __init__(self, label: str): - super().__init__(label=label, container_type=ContainerType.PROJECT) - - -class NeptuneMissingApiTokenException(NeptuneException): - def __init__(self): - message = """ -{h1} -----NeptuneMissingApiTokenException------------------------------------------- -{end} -The Neptune client couldn't find your API token. - -You can get it here: - - https://app.neptune.ai/get_my_api_token - -There are two options to add it: - - specify it in your code - - set an environment variable in your operating system. - -{h2}CODE{end} -Pass the token to the {bold}init_run(){end} function via the {bold}api_token{end} argument: - {python}neptune.init_run(project='WORKSPACE_NAME/PROJECT_NAME', api_token='YOUR_API_TOKEN'){end} - -{h2}ENVIRONMENT VARIABLE{end} {correct}(Recommended option){end} -or export or set an environment variable depending on your operating system: - - {correct}Linux/Unix{end} - In your terminal run: - {bash}export {env_api_token}="YOUR_API_TOKEN"{end} - - {correct}Windows{end} - In your CMD run: - {bash}set {env_api_token}="YOUR_API_TOKEN"{end} - -and skip the {bold}api_token{end} argument of the {bold}init_run(){end} function: - {python}neptune.init_run(project='WORKSPACE_NAME/PROJECT_NAME'){end} - -You may also want to check the following docs pages: - - https://docs.neptune.ai/setup/setting_api_token/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(env_api_token=API_TOKEN_ENV_NAME, **STYLES)) - - -class CannotSynchronizeOfflineRunsWithoutProject(NeptuneException): - def __init__(self): - super().__init__("Cannot synchronize offline runs without a project.") - - -class NeedExistingExperimentForReadOnlyMode(NeptuneException): - container_type: ContainerType - callback_name: str - - def __init__(self, container_type: ContainerType, callback_name: str): - message = """ -{h1} -----{class_name}----------------------------------------- -{end} -Read-only mode can be used only with an existing {container_type}. - -Pass the ID of a {container_type} to the {python}with_id{end} argument of {python}{callback_name}{end} -when using {python}mode="read-only"{end}. 
- -You may also want to check the following docs pages: -    - https://docs.neptune.ai/usage/resume_run/ -    - https://docs.neptune.ai/api/connection_modes/#read-only-mode - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - self.container_type = container_type - self.callback_name = callback_name - super().__init__( - message.format( - class_name=type(self).__name__, - container_type=self.container_type.value, - callback_name=self.callback_name, - **STYLES, - ) - ) - - -class NeedExistingRunForReadOnlyMode(NeedExistingExperimentForReadOnlyMode): - def __init__(self): - super().__init__(container_type=ContainerType.RUN, callback_name="neptune.init_run") - - -class NeedExistingModelForReadOnlyMode(NeedExistingExperimentForReadOnlyMode): - def __init__(self): - super().__init__(container_type=ContainerType.MODEL, callback_name="neptune.init_model") - - -class NeedExistingModelVersionForReadOnlyMode(NeedExistingExperimentForReadOnlyMode): - def __init__(self): - super().__init__( - container_type=ContainerType.MODEL_VERSION, - callback_name="neptune.init_model_version", - ) - - -class NeptuneParametersCollision(NeptuneException): - def __init__(self, parameter1, parameter2, method_name): - self.parameter1 = parameter1 - self.parameter2 = parameter2 - self.method_name = method_name - message = """ -{h1} -----NeptuneParametersCollision----------------------------------------- -{end} -The {python}{parameter1}{end} and {python}{parameter2}{end} parameters of the {python}{method_name}(){end} method are mutually exclusive. - -You may also want to check the following docs page: -    - https://docs.neptune.ai/api/universal/#initialization-methods - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" # noqa: E501 - super().__init__( - message.format( - parameter1=parameter1, - parameter2=parameter2, - method_name=method_name, - **STYLES, - ) - ) - - -class NeptuneWrongInitParametersException(NeptuneException): - pass - - -class NeptuneRunResumeAndCustomIdCollision(NeptuneWrongInitParametersException): - def __init__(self): - message = """ -{h1} -----NeptuneRunResumeAndCustomIdCollision----------------------------------------- -{end} -It's not possible to use {python}custom_run_id{end} while resuming a run. - -The {python}run{end} and {python}custom_run_id{end} parameters of the {python}init_run(){end} method are mutually exclusive. -Make sure you have no {bash}{custom_id_env}{end} environment variable set -and no value is explicitly passed to the `custom_run_id` argument when you are resuming a run. - -You may also want to check the following docs pages: -    - https://docs.neptune.ai/logging/to_existing_object/ -    - https://docs.neptune.ai/logging/custom_run_id/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" # noqa: E501 - super().__init__(message.format(custom_id_env=CUSTOM_RUN_ID_ENV_NAME, **STYLES)) - - -class NeptuneClientUpgradeRequiredError(NeptuneException): - def __init__( - self, - version: Union[Version, str], - min_version: Optional[Union[Version, str]] = None, - max_version: Optional[Union[Version, str]] = None, - ): - current_version = str(version) - required_version = "==" + replace_patch_version(str(max_version)) if max_version else ">=" + str(min_version) - message = """ -{h1} -----NeptuneClientUpgradeRequiredError------------------------------------------------------------- -{end} -Your version of the Neptune client library ({current_version}) is no longer supported by the Neptune
The minimum required version is {required_version}. - -In order to update the Neptune client library, run the following command in your terminal: - {bash}pip install -U neptune{end} -Or if you are using Conda, run the following instead: - {bash}conda update -c conda-forge neptune{end} - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__( - message.format( - current_version=current_version, - required_version=required_version, - **STYLES, - ) - ) - - -class NeptuneMissingRequiredInitParameter(NeptuneWrongInitParametersException): - def __init__( - self, - called_function: str, - parameter_name: str, - ): - message = """ -{h1} -----NeptuneMissingRequiredInitParameter--------------------------------------- -{end} -{python}neptune.{called_function}(){end} invocation was missing {python}{parameter_name}{end}. -If you want to create a new object using {python}{called_function}{end}, {python}{parameter_name}{end} is required: -https://docs.neptune.ai/api/neptune#{called_function} - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__( - message.format( - called_function=called_function, - parameter_name=parameter_name, - **STYLES, - ) - ) - - -class CannotResolveHostname(NeptuneException): - def __init__(self, host): - message = """ -{h1} -----CannotResolveHostname----------------------------------------------------------------------- -{end} -The Neptune client library was not able to resolve hostname {underline}{host}{end}. - -What should I do? - - Check if your computer is connected to the internet. - - Check if your computer is supposed to be using a proxy to access the internet. - If so, you may want to use the {python}proxies{end} parameter of the {python}init_run(){end} function. - See https://docs.neptune.ai/api/universal/#proxies - and https://requests.readthedocs.io/en/latest/user/advanced/#proxies - - Check the status of Neptune services: https://status.neptune.ai/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(host=host, **STYLES)) - - -class NeptuneOfflineModeException(NeptuneException): - pass - - -class NeptuneOfflineModeFetchException(NeptuneOfflineModeException): - def __init__(self): - message = """ -{h1} -----NeptuneOfflineModeFetchException--------------------------------------------------- -{end} -It seems you are trying to fetch data from the server while working in offline mode. -You need to work in a non-offline connection mode to fetch data from the server. - -You can set the connection mode when creating a new run: - {python}run = neptune.init_run(mode="async"){end} - -You may also want to check the following docs page: - - https://docs.neptune.ai/api/connection_modes - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES)) - - -class NeptuneOfflineModeChangeStageException(NeptuneOfflineModeException): - def __init__(self): - message = """ -{h1} -----NeptuneOfflineModeChangeStageException--------------------------------------- -{end} -You cannot change the stage of the model version while in offline mode. -""" - super().__init__(message.format(**STYLES)) - - -class NeptuneProtectedPathException(NeptuneException): - extra_info = "" - - def __init__(self, path: str): - message = """ -{h1} -----NeptuneProtectedPathException---------------------------------------------- -{end} -Field {path} cannot be changed directly. 
-{extra_info} - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - self._path = path - super().__init__( - message.format( - path=path, - extra_info=self.extra_info.format(**STYLES), - **STYLES, - ) - ) - - -class NeptuneCannotChangeStageManually(NeptuneProtectedPathException): - extra_info = """ -If you want to change the stage of the model version, -use the {python}.change_stage(){end} method: - {python}model_version.change_stage("staging"){end}""" - - -class OperationNotSupported(NeptuneException): - def __init__(self, message: str): - super().__init__(f"Operation not supported: {message}") - - -class NeptuneMissingRequirementException(NeptuneException): - def __init__(self, package_name: str, framework_name: Optional[str]): - message = """ - {h1} - ----NeptuneMissingRequirementException----------------------------------------- - {end} - Looks like the {package_name} package isn't installed. - To install it, run: - {bash}pip install {package_name}{end} - Or install both Neptune and the integration: - {bash}pip install "neptune[{framework_name}]"{end} - - For detailed instructions, check the integration guides: - - https://docs.neptune.ai/integrations - - {correct}Need help?{end}-> https://docs.neptune.ai/getting_help - """ - framework_name = framework_name if framework_name else package_name - super().__init__( - message.format( - package_name=package_name, - framework_name=framework_name, - **STYLES, - ) - ) - - -class NeptuneLimitExceedException(NeptuneException): - def __init__(self, reason: str): - message = """ -{h1} -----NeptuneLimitExceedException--------------------------------------------------------------------------------------- -{end} -{reason} - -Some limit related to your workspace or project was exceeded. You can manage your plan and check usage in the workspace -settings: https://app.neptune.ai/-/subscription -What happens to my data? - If you're using Neptune in asynchronous mode (default), the data that couldn't be uploaded is safely stored on the - disk. You can still fetch and delete data from your projects. -How do I upload my offline metadata to Neptune? - Once your workspace or project is available, you can upload the data with the following command: - {bash}neptune sync{end} -Learn more in the docs: - - https://docs.neptune.ai/help/error_limit_exceeded/ - - https://docs.neptune.ai/help/workspace_or_project_read_only/ -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES, reason=reason)) - - -class NeptuneFieldCountLimitExceedException(NeptuneException): - def __init__(self, limit: int, container_type: str, identifier: str): - message = """ -{h1} -----NeptuneFieldCountLimitExceedException--------------------------------------------------------------------------------------- -{end} -There are too many fields (more than {limit}) in the {identifier} {container_type}. -We have stopped the synchronization to the Neptune server and stored the data locally. - -To continue uploading the metadata: - - 1. Delete some excess fields from {identifier}. - - You can delete fields or namespaces with the "del" command. - For example, to delete the "training/checkpoints" namespace: - - {python}del run["training/checkpoints"]{end} - - 2. 
Once you're done, synchronize the data manually with the following command: - - {bash}neptune sync -p project_name{end} - -For more details, see https://docs.neptune.ai/usage/best_practices -""" # noqa: E501 - super().__init__( - message.format( - **STYLES, - limit=limit, - container_type=container_type, - identifier=identifier, - ) - ) - - -class NeptuneStorageLimitException(NeptuneException): - def __init__(self): - message = """ -{h1} -----NeptuneStorageLimitException--------------------------------------------------------------------------------------- -{end} -You exceeded the storage limit of the workspace. It's not possible to upload new data, but you can still fetch and delete data. -If you are using asynchronous (default) connection mode, Neptune automatically switched to offline mode -and your data is being stored safely on the disk. You can upload it later using the Neptune Command Line Interface tool: - {bash}neptune sync -p project_name{end} -What should I do? - - Go to your projects and remove runs or model metadata you don't need - - ... or update your subscription plan here: https://app.neptune.ai/-/subscription -You may also want to check the following docs page: - - https://docs.neptune.ai/api/connection_modes -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" # noqa: E501 - super().__init__(message.format(**STYLES)) - - -class FetchAttributeNotFoundException(MetadataInconsistency): - def __init__(self, attribute_path: str): - message = """ -{h1} -----MetadataInconsistency---------------------------------------------------------------------- -{end} -The field {python}{attribute_path}{end} was not found. - -Remember that in the asynchronous (default) connection mode, data is synchronized -with the Neptune servers in the background. The data may not have reached -the servers before it was fetched. Before fetching the data, you can force -wait for all the requests sent by invoking: - - {python}run.wait(){end} - -Remember that each use of {python}wait{end} introduces a delay in code execution. - -You may also want to check the following docs page: - - https://docs.neptune.ai/api/connection_modes - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help.html -""" - super().__init__(message.format(attribute_path=attribute_path, **STYLES)) - - -class ArtifactNotFoundException(MetadataInconsistency): - def __init__(self, artifact_hash: str): - message = """ -{h1} -----MetadataInconsistency---------------------------------------------------------------------- -{end} -Artifact with hash {python}{artifact_hash}{end} was not found. - -Remember that in the asynchronous (default) connection mode, data is synchronized -with the Neptune servers in the background. The data may not have reached -the servers before it was fetched. Before fetching the data, you can force -wait for all the requests sent by invoking: - - {python}run.wait(){end} - -Remember that each use of {python}wait{end} introduces a delay in code execution. - -You may also want to check the following docs page: - - https://docs.neptune.ai/api/connection_modes - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help.html -""" - super().__init__(message.format(artifact_hash=artifact_hash, **STYLES)) - - -class PlotlyIncompatibilityException(Exception): - def __init__(self, matplotlib_version, plotly_version, details): - super().__init__( - "Unable to convert plotly figure to matplotlib format. " - "Your matplotlib ({}) and plotly ({}) versions are not compatible. 
" - "{}".format(matplotlib_version, plotly_version, details) - ) - - -class NeptuneUnhandledArtifactSchemeException(NeptuneException): - def __init__(self, path: str): - scheme = urlparse(path).scheme - message = """ -{h1} -----NeptuneUnhandledArtifactProtocolException------------------------------------ -{end} -You have used a Neptune Artifact to track a file with a scheme unhandled by this client ({scheme}). -Problematic path: {path} - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(scheme=scheme, path=path, **STYLES)) - - -class NeptuneUnhandledArtifactTypeException(NeptuneException): - def __init__(self, type_str: str): - message = """ -{h1} -----NeptuneUnhandledArtifactTypeException---------------------------------------- -{end} -A Neptune Artifact you're listing is tracking a file type unhandled by this client ({type_str}). - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(type_str=type_str, **STYLES)) - - -class NeptuneLocalStorageAccessException(NeptuneException): - def __init__(self, path, expected_description): - message = """ -{h1} -----NeptuneLocalStorageAccessException------------------------------------- -{end} -Neptune had a problem processing "{path}". It expects it to be {expected_description}. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(path=path, expected_description=expected_description, **STYLES)) - - -class NeptuneRemoteStorageCredentialsException(NeptuneException): - def __init__(self): - message = """ -{h1} -----NeptuneRemoteStorageCredentialsException------------------------------------- -{end} -Neptune could not find suitable credentials for remote storage of a Neptune Artifact you're listing. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES)) - - -class NeptuneRemoteStorageAccessException(NeptuneException): - def __init__(self, location: str): - message = """ -{h1} -----NeptuneRemoteStorageAccessException------------------------------------------ -{end} -Neptune could not access an object ({location}) from remote storage of a Neptune Artifact you're listing. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(location=location, **STYLES)) - - -class ArtifactUploadingError(NeptuneException): - def __init__(self, msg: str): - super().__init__("Cannot upload artifact: {}".format(msg)) - - -class NeptuneUnsupportedArtifactFunctionalityException(NeptuneException): - def __init__(self, functionality_info: str): - message = """ -{h1} -----NeptuneUnsupportedArtifactFunctionality------------------------------------- -{end} -It seems you are using Neptune Artifacts functionality that is currently not supported. - -{functionality_info} - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(functionality_info=functionality_info, **STYLES)) - - -class NeptuneEmptyLocationException(NeptuneException): - def __init__(self, location: str, namespace: str): - message = """ -{h1} -----NeptuneEmptyLocationException---------------------------------------------- -{end} -Neptune could not find files in the requested location ({location}) during the creation of an Artifact in "{namespace}". 
- -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(location=location, namespace=namespace, **STYLES)) - - -class NeptuneFeatureNotAvailableException(NeptuneException): - def __init__(self, missing_feature): - message = """ -{h1} -----NeptuneFeatureNotAvailableException---------------------------------------------- -{end} -The following feature is not yet supported by the Neptune instance you are using: -{missing_feature} - -An update of the Neptune instance is required to use it. Please contact your local Neptune administrator -or Neptune support directly (support@neptune.ai) about the upcoming updates. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - self.message = message.format(missing_feature=missing_feature, **STYLES) - super().__init__(self.message) - - -class NeptuneObjectCreationConflict(NeptuneException): - pass - - -class NeptuneModelKeyAlreadyExistsError(NeptuneObjectCreationConflict): - def __init__(self, model_key, models_tab_url): - message = """ -{h1} -----NeptuneModelKeyAlreadyExistsError--------------------------------------------------- -{end} -A model with the provided key ({model_key}) already exists in this project. - -You can check all of your models in the Models section of the project: -{models_tab_url} - -Note: If there is a model with the same key in the trash, you need to delete the model from the trash to make the key -available again. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(model_key=model_key, models_tab_url=models_tab_url, **STYLES)) - - -class NeptuneSynchronizationAlreadyStoppedException(NeptuneException): - def __init__(self): - message = """ -{h1} -----NeptuneSynchronizationAlreadyStopped--------------------------------------------------- -{end} -The synchronization thread had stopped before Neptune could finish uploading the logged metadata. -Your data is stored locally, but you'll need to finish the synchronization manually. -To synchronize with the Neptune servers, enter the following on your command line: - - {bash}neptune sync{end} - -For details, see https://docs.neptune.ai/api/neptune_sync/ - -If the synchronization fails, you may want to check your connection and ensure that you're -within limits by going to your Neptune project settings -> Usage. -If the issue persists, our support is happy to help. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES)) - - -class StreamAlreadyUsedException(NeptuneException): - def __init__(self): - message = """ -{h1} -----StreamAlreadyUsedException--------------------------------------------------- -{end} -A File object created with File.from_stream() has already been logged. -You can only log content from the same stream once. - -For more, see https://docs.neptune.ai/api/field_types/#from_stream - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES)) - - -class NeptuneUserApiInputException(NeptuneException): - def __init__(self, message): - super().__init__(message) - - -class FileSetNotFound(NeptuneException): - def __init__(self, attribute: str, path: str): - message = """ - {h1} - ----MetadataInconsistency---------------------------------------------------------------------- - {end} - Attribute {python}{path}{end} was not found. 
- - Remember that in the asynchronous (default) connection mode, data is synchronized - with the Neptune servers in the background. The data may not have reached - the servers before it was fetched. Before fetching the data, you can force - wait for all the requests sent by invoking: - - {python}run.wait(){end} - - Remember that each use of {python}wait{end} introduces a delay in code execution. - - You may also want to check the following docs page: - - https://docs.neptune.ai/api/connection_modes - - {correct}Need help?{end}-> https://docs.neptune.ai/getting_help/ - """ - super().__init__(message.format(path=path_to_str([attribute, path]), **STYLES)) - - -class NeptuneMaxDiskUtilizationExceeded(NeptuneException): - def __init__(self, disk_utilization: float, utilization_limit: float): - message = """ -{h1} -----NeptuneMaxDiskUtilizationExceeded---------------------------------------------- -{end} -Current disk utilization ({disk_utilization}%) exceeds the limit ({utilization_limit}%). - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help - """ - super().__init__( - message.format(disk_utilization=disk_utilization, utilization_limit=utilization_limit, **STYLES) - ) - - -class NeptuneInvalidQueryException(NeptuneException): - def __init__(self, nql_query: str): - message = f""" -The provided NQL query is invalid: {nql_query}. -For syntax help, see https://docs.neptune.ai/usage/nql/ -""" - super().__init__(message) - - -class NeptuneUnsupportedFunctionalityException(NeptuneException): - def __init__(self): - message = """ -{h1} -----NeptuneUnsupportedFunctionalityException---------------------------- -{end} -You're using neptune 2.0, which is in Beta. -Some functionality that you tried to use is not supported in the installed version. -We will gradually add missing features to the Beta. Check that you're on the latest version. -""" - super().__init__(message.format(**STYLES)) diff --git a/src/neptune/handler.py b/src/neptune/handler.py deleted file mode 100644 index f42bde74f..000000000 --- a/src/neptune/handler.py +++ /dev/null @@ -1,801 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
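The handler.py module deleted below implements the `Handler` objects that bracket access on a Neptune object returns. As a minimal sketch of the lazy-creation pattern it implements (assuming the `neptune.init_run` entry point that this patch also removes), an attribute is only defined on first assignment:

    import neptune

    run = neptune.init_run()           # a NeptuneObject
    handler = run["params/lr"]         # __getitem__ returns a Handler; nothing is defined yet
    handler.assign(0.01)               # first assignment creates a Float attribute
    run["params"]["batch_size"] = 64   # handlers nest; __setitem__ delegates to assign()
    print(run["params/lr"].fetch())    # -> 0.01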
-# -__all__ = ["Handler"] - -from functools import wraps -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Collection, - Dict, - Iterable, - Iterator, - List, - Optional, - Union, -) - -from neptune.api.models import FileEntry -from neptune.attributes import File -from neptune.attributes.atoms.artifact import Artifact -from neptune.attributes.constants import SYSTEM_STAGE_ATTRIBUTE_PATH -from neptune.attributes.file_set import FileSet -from neptune.attributes.namespace import Namespace -from neptune.attributes.series import FileSeries -from neptune.attributes.series.float_series import FloatSeries -from neptune.attributes.series.string_series import StringSeries -from neptune.attributes.sets.string_set import StringSet -from neptune.exceptions import ( - MissingFieldException, - NeptuneCannotChangeStageManually, - NeptuneUnsupportedFunctionalityException, - NeptuneUserApiInputException, -) -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import ( - is_collection, - is_dict_like, - is_float, - is_float_like, - is_string, - is_stringify_value, - verify_collection_type, - verify_type, -) -from neptune.internal.utils.paths import ( - join_paths, - parse_path, -) -from neptune.internal.value_to_attribute_visitor import ValueToAttributeVisitor -from neptune.internal.warnings import warn_about_unsupported_type -from neptune.objects.abstract import SupportsNamespaces -from neptune.types.atoms.file import File as FileVal -from neptune.types.type_casting import cast_value_for_extend -from neptune.types.value_copy import ValueCopy -from neptune.typing import ProgressBarType -from neptune.utils import stringify_unsupported - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - - -def feature_temporarily_unavailable(_: Callable[..., Any]) -> Callable[..., Any]: - def wrapper(*_, **__): - raise NeptuneUnsupportedFunctionalityException() - - return wrapper - - -def validate_path_not_protected(target_path: str, handler: "Handler"): - path_protection_exception = handler._PROTECTED_PATHS.get(target_path) - if path_protection_exception: - raise path_protection_exception(target_path) - - -def check_protected_paths(fun): - @wraps(fun) - def inner_fun(self: "Handler", *args, **kwargs): - validate_path_not_protected(self._path, self) - return fun(self, *args, **kwargs) - - return inner_fun - - -ExtendDictT = Union[Collection[Any], Dict[str, "ExtendDictT"]] - - -class Handler(SupportsNamespaces): - # paths which can't be modified by client directly - _PROTECTED_PATHS = { - SYSTEM_STAGE_ATTRIBUTE_PATH: NeptuneCannotChangeStageManually, - } - - def __init__(self, container: "NeptuneObject", path: str): - super().__init__() - self._container = container - self._path = str(path) - - def __repr__(self): - attr = self._container.get_attribute(self._path) - formal_type = type(attr).__name__ if attr else "Unassigned" - return f'<{formal_type} field at "{self._path}">' - - def _ipython_key_completions_(self): - return self._container._get_subpath_suggestions(path_prefix=self._path) - - def __getitem__(self, path: str) -> "Handler": - return Handler(self._container, join_paths(self._path, path)) - - def __setitem__(self, key: str, value) -> None: - self[key].assign(value) - - def __getattr__(self, item: str): - run_level_methods = {"exists", "get_structure", "print_structure", "stop", "sync", "wait"} - - if item in run_level_methods: - raise AttributeError( - "You're invoking an object-level 
method on a handler for a namespace inside the object.", - f""" - For example: You're trying run[{self._path}].{item}() - but you probably want run.{item}(). - - To obtain the root object of the namespace handler, you can do the following: - root_run = run[{self._path}].get_root_object() - root_run.{item}() - """, - ) - - return object.__getattribute__(self, item) - - def _get_attribute(self): - """Returns an attribute defined in `self._path` or throws MissingFieldException.""" - attr = self._container.get_attribute(self._path) - if attr is None: - raise MissingFieldException(self._path) - return attr - - @property - def container(self) -> "NeptuneObject": - """Returns the container that the attribute is attached to.""" - return self._container - - def get_root_object(self) -> "NeptuneObject": - """Returns the root-level object of a namespace handler. - - Example: - If you use it on the namespace of a run, the run object is returned. - - >>> pretraining = run["workflow/steps/pretraining"] - >>> pretraining.stop() - ... # Error: pretraining is a namespace handler object, not a run object - >>> pretraining_run = pretraining.get_root_object() - >>> pretraining_run.stop() # The root run is stopped - - For more information, see the docs: - https://docs.neptune.ai/api/field_types/#get_root_object - """ - return self._container - - @check_protected_paths - def assign(self, value, *, wait: bool = False) -> None: - """Assigns the provided value to the field. - - Available for the following field types: - * `Integer` - * `Float` - * `Boolean` - * `String` - - Args: - value: Value to be stored in a field. - wait (bool, optional): If `True` the client will wait to send all tracked metadata to the server. - This makes the call synchronous. - Defaults to `False`. - - Examples: - Assigning values: - - >>> import neptune - >>> run = neptune.init_run() - - >>> # You can use both the Python assign operator (=) - ... run['parameters/max_epochs'] = 5 - >>> # as well as directly use the .assign method - ... run['parameters/max_epochs'].assign(5) - - You can assign integers, floats, bools, strings - - >>> run['parameters/max_epochs'] = 5 - >>> run['parameters/max_lr'] = 0.4 - >>> run['parameters/early_stopping'] = True - >>> run['JIRA'] = 'NPT-952' - - You can also assign values in batch through a dict - - >>> params = {'max_epochs': 5, 'lr': 0.4} - >>> run['parameters'] = params - - For more information, see the docs: - https://docs.neptune.ai/api-reference/field-types - """ - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - self._container.define(self._path, value) - else: - if isinstance(value, Handler): - value = ValueCopy(value) - attr.process_assignment(value, wait=wait) - - @check_protected_paths - def upload(self, value, *, wait: bool = False) -> None: - """Uploads the provided file under the specified field path. - - Args: - value (str or File): Path to the file to be uploaded or `File` value object. - wait (bool, optional): If `True` the client will wait to send all tracked metadata to the server. - This makes the call synchronous. - Defaults to `False`. - - Examples: - >>> import neptune - >>> run = neptune.init_run() - - >>> # Upload example data - ... run["dataset/data_sample"].upload("sample_data.csv") - - >>> # Both the content and the extension are stored - ... # When downloaded, the filename is a combination of the path and the extension - ... 
run["dataset/data_sample"].download() # data_sample.csv - - Explicitly create File value object - - >>> from neptune.types import File - >>> run["dataset/data_sample"].upload(File("sample_data.csv")) - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#upload - - """ - raise NeptuneUnsupportedFunctionalityException - value = FileVal.create_from(value) - - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - attr = File(self._container, parse_path(self._path)) - self._container.set_attribute(self._path, attr) - attr.upload(value, wait=wait) - - @check_protected_paths - def upload_files(self, value: Union[str, Iterable[str]], *, wait: bool = False) -> None: - raise NeptuneUnsupportedFunctionalityException - if is_collection(value): - verify_collection_type("value", value, str) - else: - verify_type("value", value, str) - - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - attr = FileSet(self._container, parse_path(self._path)) - self._container.set_attribute(self._path, attr) - attr.upload_files(value, wait=wait) - - @check_protected_paths - def log( - self, - value, - *, - step: Optional[float] = None, - timestamp: Optional[float] = None, - wait: bool = False, - **kwargs, - ) -> None: - """Logs the provided value or a collection of values. - - Available for the following field types: - - * `FloatSeries` - * `StringSeries` - * `FileSeries` - - - Args: - value: Value or collection of values to be added to the field. - step (float or int, optional, default is None): Index of the log entry being appended. - Must be strictly increasing. - Defaults to `None`. - timestamp(float or int, optional): Time index of the log entry being appended in form of Unix time. - If `None` current time (`time.time()`) will be used as a timestamp. - Defaults to `None`. - wait (bool, optional): If `True` the client will wait to send all tracked metadata to the server. - This makes the call synchronous. - Defaults to `False`. 
- - For more information, see the docs: - https://docs.neptune.ai/api-reference/field-types - - """ - verify_type("step", step, (int, float, type(None))) - verify_type("timestamp", timestamp, (int, float, type(None))) - - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - from_stringify_value = False - if is_stringify_value(value): - from_stringify_value, value = True, value.value - - if is_collection(value): - if value: - first_value = next(iter(value)) - else: - raise ValueError("Cannot deduce value type: `value` cannot be empty") - else: - first_value = value - - if is_float(first_value): - attr = FloatSeries(self._container, parse_path(self._path)) - elif is_string(first_value): - attr = StringSeries(self._container, parse_path(self._path)) - elif FileVal.is_convertable(first_value): - raise NeptuneUnsupportedFunctionalityException - attr = FileSeries(self._container, parse_path(self._path)) - elif is_float_like(first_value): - attr = FloatSeries(self._container, parse_path(self._path)) - elif from_stringify_value: - if is_collection(value): - value = list(map(str, value)) - else: - value = str(value) - attr = StringSeries(self._container, parse_path(self._path)) - else: - warn_about_unsupported_type(type_str=str(type(first_value))) - return None - - self._container.set_attribute(self._path, attr) - attr.log(value, step=step, timestamp=timestamp, wait=wait, **kwargs) - - @check_protected_paths - def append( - self, - value: Union[dict, Any], - *, - step: Optional[float] = None, - timestamp: Optional[float] = None, - wait: bool = False, - **kwargs, - ) -> None: - """Logs a series of values, such as a metric, by appending the provided value to the end of the series. - - Available for the following series field types: - - * `FloatSeries` - series of float values - * `StringSeries` - series of strings - * `FileSeries` - series of files - - When you log the first value, the type of the value determines what type of field is created. - To learn more about field types, see the docs: https://docs.neptune.ai/api/field_types - - Args: - value: Value to be added to the series field. - step: Optional index of the entry being appended. Must be strictly increasing. - timestamp: Optional time index of the log entry being appended, in Unix time format. - If None, the current time (obtained with `time.time()`) is used. - wait: If True, the client sends all tracked metadata to the server before executing the call. - For more information, see: https://docs.neptune.ai/api/universal/#wait - - Examples: - >>> import neptune - >>> run = neptune.init_run() - >>> for epoch in range(n_epochs): - ... ... # Your training loop - ... run["train/epoch/loss"].append(loss) # FloatSeries - ... token = str(...) - ... run["train/tokens"].append(token) # StringSeries - ... 
run["train/distribution"].append(plt_histogram, step=epoch) # FileSeries - """ - verify_type("step", step, (int, float, type(None))) - verify_type("timestamp", timestamp, (int, float, type(None))) - if step is not None: - step = [step] - if timestamp is not None: - timestamp = [timestamp] - - value = ExtendUtils.transform_to_extend_format(value) - self.extend(value, steps=step, timestamps=timestamp, wait=wait, **kwargs) - - @check_protected_paths - def extend( - self, - values: ExtendDictT, - *, - steps: Optional[Collection[float]] = None, - timestamps: Optional[Collection[float]] = None, - wait: bool = False, - **kwargs, - ) -> None: - """Logs a series of values by appending the provided collection of values to the end of the series. - - Available for the following series field types: - - * `FloatSeries` - series of float values - * `StringSeries` - series of strings - * `FileSeries` - series of files - - When you log the first value, the type of the value determines what type of field is created. - To learn more about field types, see the docs: https://docs.neptune.ai/api/field_types - - Args: - values: Values to be added to the series field, as a dictionary or collection. - steps: Optional collection of indeces for the entries being appended. Must be strictly increasing. - timestamps: Optional collection of time indeces for the entries being appended, in Unix time format. - If None, the current time (obtained with `time.time()`) is used. - wait: If True, the client sends all tracked metadata to the server before executing the call. - For details, see https://docs.neptune.ai/api/universal/#wait - - Example: - The following example reads a CSV file into a pandas DataFrame and extracts the values - to create a Neptune series: - >>> import neptune - >>> run = neptune.init_run() - >>> for epoch in range(n_epochs): - ... df = pandas.read_csv("time_series.csv") - ... ys = df["value"] - ... ts = df["timestamp"] - ... run["data/example_series"].extend(ys, timestamps=ts) - """ - ExtendUtils.validate_values_for_extend(values, steps, timestamps) - - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - neptune_value = cast_value_for_extend(values) - if neptune_value is None: - warn_about_unsupported_type(type_str=str(type(values))) - return None - - attr = ValueToAttributeVisitor(self._container, parse_path(self._path)).visit(neptune_value) - self._container.set_attribute(self._path, attr) - - attr.extend(values, steps=steps, timestamps=timestamps, wait=wait, **kwargs) - - @check_protected_paths - def add(self, values: Union[str, Iterable[str]], *, wait: bool = False) -> None: - """Adds the provided tags to the run. - - Args: - values (str or collection of str): Tag or tags to be added. - .. note:: - You can use emojis in your tags. For example, "Exploration 🧪" - wait (bool, optional): If `True`, the client will wait to send all tracked metadata to the server first. - This makes the call synchronous. - Defaults to `False`. - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#add - """ - verify_type("values", values, (str, Iterable)) - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - attr = StringSet(self._container, parse_path(self._path)) - self._container.set_attribute(self._path, attr) - attr.add(values, wait=wait) - - @check_protected_paths - def remove(self, values: Union[str, Iterable[str]], *, wait: bool = False) -> None: - """Removes the provided tags from the set. 
- - Args: - values (str or collection of str): Tags to be removed. - wait (bool, optional): If `True`, the client will wait to send all tracked metadata to the server first. - This makes the call synchronous. - Defaults to `False`. - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#remove - """ - return self._pass_call_to_attr(function_name="remove", values=values, wait=wait) - - @check_protected_paths - def clear(self, *, wait: bool = False): - """Removes all tags from the `StringSet`. - - Args: - wait (bool, optional): If `True`, the client will wait to send all tracked metadata to the server first. - This makes the call synchronous. - Defaults to `False`. - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#clear - """ - return self._pass_call_to_attr(function_name="clear", wait=wait) - - def fetch(self): - """Fetches the field's value or, in the case of a namespace, fetches the values of all non-File Atom fields as a dictionary. - - Available for the following field types: - - * `Integer` - * `Float` - * `Boolean` - * `String` - * `DateTime` - * `StringSet` - * `Namespace handler` - - Returns: - If called on a field, returns the stored value. - If called on a namespace, returns a dictionary containing the values of all non-Atom fields. - - For more information on field types, see the docs: - https://docs.neptune.ai/api-reference/field-types - """ - return self._pass_call_to_attr(function_name="fetch") - - def fetch_last(self): - """Fetches the last value stored in the series from Neptune. - - Available for the following field types: - - * `FloatSeries` - * `StringSeries` - - Returns: - The last value stored in the series. - - For more information on field types, see the docs: - https://docs.neptune.ai/api-reference/field-types - """ - return self._pass_call_to_attr(function_name="fetch_last") - - def fetch_values(self, *, include_timestamp: Optional[bool] = True, progress_bar: Optional[ProgressBarType] = None): - """Fetches all values stored in the series from Neptune. - - Available for the following field types: - - * `FloatSeries` - * `StringSeries` - - Args: - include_timestamp (bool, optional): Whether the fetched data should include the timestamp field. - Defaults to `True`. - progress_bar (bool or type of progress bar, optional): Progress bar to be used while fetching values. - If `None` or `True`, the default tqdm-based progress bar will be used. - If `False`, no progress bar will be used. - If a type of progress bar is passed, it will be used instead of the default one. - Defaults to `None`. - - Returns: - ``pandas.DataFrame`` containing all the values and their indexes stored in the series field. - - For more information on field types, see the docs: - https://docs.neptune.ai/api-reference/field-types - """ - return self._pass_call_to_attr( - function_name="fetch_values", - include_timestamp=include_timestamp, - progress_bar=progress_bar, - ) - - @check_protected_paths - def delete_files(self, paths: Union[str, Iterable[str]], *, wait: bool = False) -> None: - """Deletes the files specified by the paths from the `FileSet` stored on the Neptune servers. - - Args: - paths (str or collection of str): `Path` or paths to files or folders to be deleted. - Note that the paths are relative to the FileSet itself. For example, if the `FileSet` contains - the files `example.txt`, `varia/notes.txt`, `varia/data.csv`, to delete the entire `varia` subfolder, - you would pass `varia` as the argument. 
- wait (bool, optional): If `True`, the client will wait to send all tracked metadata to the server. - This makes the call synchronous. - Defaults to `False`. - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#delete_files - """ - raise NeptuneUnsupportedFunctionalityException - return self._pass_call_to_attr(function_name="delete_files", paths=paths, wait=wait) - - @check_protected_paths - def download( - self, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ) -> None: - """Downloads the stored files to the working directory or to the specified destination. - - Available for the following field types: - - * `File` - * `FileSeries` - * `FileSet` - * `Artifact` - - Args: - destination (str, optional): Path to where the file(s) should be downloaded. - If `None`, the file will be downloaded to the working directory. - If `destination` is a directory, the file will be downloaded to the specified directory with a filename - composed from the field name and extension (if present). - If `destination` is a path to a file, the file will be downloaded under the specified name. - Defaults to `None`. - progress_bar (bool or type of progress bar, optional): Progress bar to be used while downloading assets. - If `None` or `True`, the default tqdm-based progress bar will be used. - If `False`, no progress bar will be used. - If a type of progress bar is passed, it will be used instead of the default one. - Defaults to `None`. - - For more information, see the docs: - https://docs.neptune.ai/api-reference/field-types - """ - raise NeptuneUnsupportedFunctionalityException - return self._pass_call_to_attr(function_name="download", destination=destination, progress_bar=progress_bar) - - def download_last(self, destination: str = None) -> None: - """Downloads the stored files to the working directory or to the specified destination. - - Args: - destination (str, optional): Path to where the file(s) should be downloaded. - If `None`, the file will be downloaded to the working directory. - If `destination` is a directory, the file will be downloaded to the specified directory with a filename - composed from the field name and extension (if present). - If `destination` is a path to a file, the file will be downloaded under the specified name. - Defaults to `None`. - - For more information, see the docs: - https://docs.neptune.ai/api/field_types#download_last - """ - return self._pass_call_to_attr(function_name="download_last", destination=destination) - - @feature_temporarily_unavailable - def fetch_hash(self) -> str: - """Fetches the hash of an artifact. - - You may also want to check the docs: - https://docs.neptune.ai/api/field_types#fetch_hash - """ - return self._pass_call_to_attr(function_name="fetch_hash") - - def fetch_extension(self) -> str: - """Fetches the extension of a file. - - You may also want to check the docs: - https://docs.neptune.ai/api/field_types#fetch_extension - """ - raise NeptuneUnsupportedFunctionalityException - return self._pass_call_to_attr(function_name="fetch_extension") - - @feature_temporarily_unavailable - def fetch_files_list(self) -> List[ArtifactFileData]: - """Fetches the list of files in an artifact and their metadata. 
- - You may also want to check the docs: - https://docs.neptune.ai/api/field_types#fetch_files_list - """ - raise NeptuneUnsupportedFunctionalityException - return self._pass_call_to_attr(function_name="fetch_files_list") - - def list_fileset_files(self, path: Optional[str] = None) -> List[FileEntry]: - """Fetches metadata of the file set. - - If the top-level artifact of the field is a directory, only the metadata of this directory is returned. - You can use the `path` argument to list metadata of the files contained inside the directory or subdirectories. - - Args: - path: Path to a nested directory, to get metadata of the files contained within the directory. - - Returns: - List of FileEntry items with the following metadata: name, size (bytes), mtime (last modification time), - and file type (file or directory). - - Examples: - In this example, a Neptune run (RUN-100) has a FileSet field "dataset" containing a directory called "data", - which has a subdirectory "samples" and a file "dataset.csv". The code for logging this would be: - `run["dataset"].upload_files("data")` - - >>> import neptune - >>> run = neptune.init_run(with_id="RUN-100") - >>> run["dataset"].list_fileset_files() - [FileEntry(name='data', size=None, mtime=datetime.datetime(2023, 8, 17, 10, 31, 54, 278601, tzinfo=tzutc()), - file_type='directory')] - >>> run["dataset"].list_fileset_files(path="data") - [FileEntry(name='samples', size=None, mtime=datetime.datetime(2023, 8, 17, 10, 34, 6, 777017, - tzinfo=tzutc()), file_type='directory'), FileEntry(name='dataset.csv', size=215, - mtime=datetime.datetime(2023, 8, 17, 10, 31, 26, 402000, tzinfo=tzutc()), file_type='file')] - >>> run["dataset"].list_fileset_files(path="data/samples") - [FileEntry(name='sample_v2.csv', size=215, mtime=datetime.datetime(2023, 8, 17, 10, 31, 26, 491000, - tzinfo=tzutc()), file_type='file'), FileEntry(name='sample_v3.csv', size=215, mtime=datetime.datetime(2023, - 8, 17, 10, 31, 26, 338000, tzinfo=tzutc()), file_type='file'), ...] - - For more information, see the API reference: - https://docs.neptune.ai/api/field_types#list_fileset_files - """ - raise NeptuneUnsupportedFunctionalityException - return self._pass_call_to_attr(function_name="list_fileset_files", path=path) - - def _pass_call_to_attr(self, function_name, **kwargs): - return getattr(self._get_attribute(), function_name)(**kwargs) - - @feature_temporarily_unavailable - @check_protected_paths - def track_files(self, path: str, *, destination: str = None, wait: bool = False) -> None: - """Creates an artifact tracking some files. 
- - You may also want to check the docs: - https://docs.neptune.ai/api/field_types#track_files - """ - raise NeptuneUnsupportedFunctionalityException - with self._container.lock(): - attr = self._container.get_attribute(self._path) - if attr is None: - attr = Artifact(self._container, parse_path(self._path)) - self._container.set_attribute(self._path, attr) - - attr.track_files(path=path, destination=destination, wait=wait) - - def __delitem__(self, path) -> None: - self.pop(path) - - @feature_temporarily_unavailable - @check_protected_paths - def pop(self, path: str = None, *, wait: bool = False) -> None: - with self._container.lock(): - handler = self - if path: - verify_type("path", path, str) - handler = self[path] - path = join_paths(self._path, path) - # extra check: check_protected_paths decorator does not catch flow with non-null path - validate_path_not_protected(path, self) - else: - path = self._path - - attribute = self._container.get_attribute(path) - if isinstance(attribute, Namespace): - for child_path in list(attribute): - handler.pop(child_path, wait=wait) - else: - self._container._pop_impl(parse_path(path), wait=wait) - - -class ExtendUtils: - @staticmethod - def transform_to_extend_format(value): - """Preserves the nested structure created by `Namespaces` and `dict_like` objects, - but replaces all other values with single-element lists, - so the work can be delegated to the `extend` method.""" - if isinstance(value, Namespace) or is_dict_like(value): - return {k: ExtendUtils.transform_to_extend_format(v) for k, v in value.items()} - - if isinstance(value, StringifyValue): - return stringify_unsupported([value.value]) - - return [value] - - @staticmethod - def validate_values_for_extend(values, steps, timestamps): - """Validates that the input data is a collection or a namespace whose leaves are collections. - If steps or timestamps are passed, checks that their length matches the number of values.""" - collections_lengths = set(ExtendUtils.generate_leaf_collection_lengths(values)) - - if len(collections_lengths) > 1: - if steps is not None: - raise NeptuneUserApiInputException("Number of steps must be equal to the number of values") - if timestamps is not None: - raise NeptuneUserApiInputException("Number of timestamps must be equal to the number of values") - else: - common_collections_length = next(iter(collections_lengths)) - if steps is not None and common_collections_length != len(steps): - raise NeptuneUserApiInputException("Number of steps must be equal to the number of values") - if timestamps is not None and common_collections_length != len(timestamps): - raise NeptuneUserApiInputException("Number of timestamps must be equal to the number of values") - - @staticmethod - def generate_leaf_collection_lengths(values) -> Iterator[int]: - if is_stringify_value(values): - values = values.value - - if isinstance(values, Namespace) or is_dict_like(values): - for val in values.values(): - yield from ExtendUtils.generate_leaf_collection_lengths(val) - elif is_collection(values): - yield len(values) - else: - raise NeptuneUserApiInputException("Values must be a collection, or namespace leaves must be collections") diff --git a/src/neptune/integrations/__init__.py b/src/neptune/integrations/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/integrations/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/integrations/aws/__init__.py b/src/neptune/integrations/aws/__init__.py deleted file mode 100644 index bef3d1925..000000000 --- a/src/neptune/integrations/aws/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-aws", suggestion="aws") - -from neptune_aws.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/detectron2/__init__.py b/src/neptune/integrations/detectron2/__init__.py deleted file mode 100644 index 057036686..000000000 --- a/src/neptune/integrations/detectron2/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-detectron2", suggestion="detectron2") - -from neptune_detectron2.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/fastai/__init__.py b/src/neptune/integrations/fastai/__init__.py deleted file mode 100644 index adc6fd6d1..000000000 --- a/src/neptune/integrations/fastai/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
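Each integration shim deleted below follows the same pattern: require_installed() fails fast if the companion package is missing, then the implementation module is re-exported wholesale. A hedged usage sketch follows; the exported name NeptuneCallback comes from the neptune-fastai package and is an assumption here, as is the exception raised by require_installed:

    from neptune.exceptions import NeptuneMissingRequirementException

    try:
        # Re-exports neptune_fastai.impl if the "fastai" extra is installed
        from neptune.integrations.fastai import NeptuneCallback  # assumed export name
    except NeptuneMissingRequirementException:
        raise SystemExit('pip install "neptune[fastai]"')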
-# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-fastai", suggestion="fastai") - -from neptune_fastai.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/kedro/__init__.py b/src/neptune/integrations/kedro/__init__.py deleted file mode 100644 index f246ee24c..000000000 --- a/src/neptune/integrations/kedro/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("kedro-neptune", suggestion="kedro") - -from kedro_neptune.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/lightgbm/__init__.py b/src/neptune/integrations/lightgbm/__init__.py deleted file mode 100644 index 37474dfb9..000000000 --- a/src/neptune/integrations/lightgbm/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-lightgbm", suggestion="lightgbm") - -from neptune_lightgbm.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/mosaicml/__init__.py b/src/neptune/integrations/mosaicml/__init__.py deleted file mode 100644 index f24007157..000000000 --- a/src/neptune/integrations/mosaicml/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
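The MosaicML shim that follows differs from the other integrations: rather than a separate neptune-* package, it re-exports the NeptuneLogger bundled with Composer. A hedged sketch of how it would plug into training; Composer's Trainer API and the model variable are assumptions, not part of this diff:

    from composer import Trainer  # assumed standard Composer entry point
    from neptune.integrations.mosaicml import NeptuneLogger

    # `my_model` is a hypothetical ComposerModel instance
    trainer = Trainer(model=my_model, loggers=[NeptuneLogger()], max_duration="1ep")
    trainer.fit()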
-# -__all__ = ["NeptuneLogger"] - -from neptune.internal.utils.requirement_check import require_installed - -require_installed("mosaicml") - -from composer.loggers import NeptuneLogger # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/optuna/__init__.py b/src/neptune/integrations/optuna/__init__.py deleted file mode 100644 index 6e3f81735..000000000 --- a/src/neptune/integrations/optuna/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-optuna", suggestion="optuna") - -from neptune_optuna.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/pandas/__init__.py b/src/neptune/integrations/pandas/__init__.py deleted file mode 100644 index 52161c7f5..000000000 --- a/src/neptune/integrations/pandas/__init__.py +++ /dev/null @@ -1,133 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -__all__ = ["to_pandas"] - -from datetime import datetime -from typing import ( - TYPE_CHECKING, - Dict, - Optional, - Tuple, - Union, -) - -import pandas as pd - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - FieldVisitor, - FileField, - FileSetField, - FloatField, - FloatSeriesField, - GitRefField, - ImageSeriesField, - IntField, - LeaderboardEntry, - NotebookRefField, - ObjectStateField, - StringField, - StringSeriesField, - StringSetField, -) - -if TYPE_CHECKING: - from neptune.table import Table - -PANDAS_AVAILABLE_TYPES = Union[str, float, int, bool, datetime, None] - - -class FieldToPandasValueVisitor(FieldVisitor[PANDAS_AVAILABLE_TYPES]): - - def visit_float(self, field: FloatField) -> float: - return field.value - - def visit_int(self, field: IntField) -> int: - return field.value - - def visit_bool(self, field: BoolField) -> bool: - return field.value - - def visit_string(self, field: StringField) -> str: - return field.value - - def visit_datetime(self, field: DateTimeField) -> datetime: - return field.value - - def visit_file(self, field: FileField) -> None: - return None - - def visit_string_set(self, field: StringSetField) -> Optional[str]: - return ",".join(field.values) - - def visit_float_series(self, field: FloatSeriesField) -> Optional[float]: - return field.last - - def visit_string_series(self, field: StringSeriesField) -> Optional[str]: - return field.last - - def visit_image_series(self, field: ImageSeriesField) -> None: - return None - - def visit_file_set(self, field: FileSetField) -> None: - return None - - def visit_git_ref(self, field: GitRefField) -> Optional[str]: - return field.commit.commit_id if field.commit is not None else None - - def visit_object_state(self, field: ObjectStateField) -> str: - return field.value - - def visit_notebook_ref(self, field: NotebookRefField) -> Optional[str]: - return field.notebook_name - - def visit_artifact(self, field: ArtifactField) -> str: - return field.hash - - -def make_row(entry: LeaderboardEntry, to_value_visitor: FieldVisitor) -> Dict[str, PANDAS_AVAILABLE_TYPES]: - row: Dict[str, PANDAS_AVAILABLE_TYPES] = dict() - - for field in entry.fields: - value = to_value_visitor.visit(field) - if value is not None: - row[field.path] = value - - return row - - -def sort_key(field: str) -> Tuple[int, str]: - domain = field.split("/")[0] - if domain == "sys": - return 0, field - if domain == "monitoring": - return 2, field - return 1, field - - -def to_pandas(table: Table) -> pd.DataFrame: - - to_value_visitor = FieldToPandasValueVisitor() - rows = dict((n, make_row(entry, to_value_visitor)) for (n, entry) in enumerate(table._entries)) - - df = pd.DataFrame.from_dict(data=rows, orient="index") - df = df.reindex(sorted(df.columns, key=sort_key), axis="columns") - - return df diff --git a/src/neptune/integrations/prophet/__init__.py b/src/neptune/integrations/prophet/__init__.py deleted file mode 100644 index bfd990d63..000000000 --- a/src/neptune/integrations/prophet/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
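The removed to_pandas helper flattens each leaderboard entry into one row via the visitor, then orders columns so that sys/* fields come first and monitoring/* fields last. A toy demonstration of that ordering rule, with invented field names:

from typing import Tuple


def sort_key(field: str) -> Tuple[int, str]:
    # Same rule as above: sys/* first, monitoring/* last, the rest in between.
    domain = field.split("/")[0]
    if domain == "sys":
        return 0, field
    if domain == "monitoring":
        return 2, field
    return 1, field


columns = ["monitoring/cpu", "metrics/loss", "sys/id", "params/lr", "sys/owner"]
print(sorted(columns, key=sort_key))
# ['sys/id', 'sys/owner', 'metrics/loss', 'params/lr', 'monitoring/cpu']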
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune_prophet", suggestion="prophet") - -from neptune_prophet.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/python_logger.py b/src/neptune/integrations/python_logger.py deleted file mode 100644 index 7cca2bfff..000000000 --- a/src/neptune/integrations/python_logger.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NeptuneHandler"] - -import logging -import threading - -from neptune import Run -from neptune.internal.state import ContainerState -from neptune.internal.utils import verify_type -from neptune.version import version as neptune_version - -INTEGRATION_VERSION_KEY = "source_code/integrations/neptune-python-logger" - - -class NeptuneHandler(logging.Handler): - """Handler that sends the log records created by the logger to Neptune. - - Args: - run (Run): An existing run reference (as returned by `neptune.init_run`). - The logger will send messages as a `StringSeries` field on this run. - level (int, optional): Log level of the handler. Defaults to `logging.NOTSET`, - which logs everything that matches the logger's level. - path (str, optional): Path to the `StringSeries` field used for logging. Defaults to `None`. - If `None`, `'monitoring/python_logger'` is used. - - Examples: - >>> import logging - >>> import neptune - >>> from neptune.integrations.python_logger import NeptuneHandler - - >>> logger = logging.getLogger("root_experiment") - >>> logger.setLevel(logging.DEBUG) - - >>> run = neptune.init_run(project="neptune/sandbox") - >>> npt_handler = NeptuneHandler(run=run) - >>> logger.addHandler(npt_handler) - - >>> logger.debug("Starting data preparation") - ...
- >>> logger.debug("Data preparation done") - """ - - def __init__(self, *, run: Run, level=logging.NOTSET, path: str = None): - verify_type("run", run, Run) - verify_type("level", level, int) - verify_type("path", path, (str, type(None))) - - super().__init__(level=level) - self._path = path if path else f"{run.monitoring_namespace}/python_logger" - self._run = run - self._thread_local = threading.local() - - self._run[INTEGRATION_VERSION_KEY] = str(neptune_version) - - def emit(self, record: logging.LogRecord) -> None: - if not hasattr(self._thread_local, "inside_write"): - self._thread_local.inside_write = False - - if self._run.get_state() == ContainerState.STARTED.value and not self._thread_local.inside_write: - try: - self._thread_local.inside_write = True - message = self.format(record) - self._run[self._path].append(message) - finally: - self._thread_local.inside_write = False diff --git a/src/neptune/integrations/pytorch/__init__.py b/src/neptune/integrations/pytorch/__init__.py deleted file mode 100644 index ea2e33a9b..000000000 --- a/src/neptune/integrations/pytorch/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-pytorch", suggestion="pytorch") - -from neptune_pytorch.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/pytorch_lightning/__init__.py b/src/neptune/integrations/pytorch_lightning/__init__.py deleted file mode 100644 index 753917956..000000000 --- a/src/neptune/integrations/pytorch_lightning/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NeptuneLogger"] - -from neptune.internal.utils.requirement_check import require_installed - -require_installed("pytorch-lightning") - - -from pytorch_lightning.loggers import NeptuneLogger # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/sacred/__init__.py b/src/neptune/integrations/sacred/__init__.py deleted file mode 100644 index bf8b21924..000000000 --- a/src/neptune/integrations/sacred/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-sacred", suggestion="sacred") - -from neptune_sacred.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/sklearn/__init__.py b/src/neptune/integrations/sklearn/__init__.py deleted file mode 100644 index 651196213..000000000 --- a/src/neptune/integrations/sklearn/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-sklearn", suggestion="sklearn") - -from neptune_sklearn.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/tensorboard/__init__.py b/src/neptune/integrations/tensorboard/__init__.py deleted file mode 100644 index cce6b53c8..000000000 --- a/src/neptune/integrations/tensorboard/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-tensorboard", suggestion="tensorboard") - -from neptune_tensorboard.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/tensorflow_keras/__init__.py b/src/neptune/integrations/tensorflow_keras/__init__.py deleted file mode 100644 index 9c421d89f..000000000 --- a/src/neptune/integrations/tensorflow_keras/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-tensorflow-keras", suggestion="tensorflow-keras") - -from neptune_tensorflow_keras.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/transformers/__init__.py b/src/neptune/integrations/transformers/__init__.py deleted file mode 100644 index ef4f9c440..000000000 --- a/src/neptune/integrations/transformers/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NeptuneCallback"] - -from neptune.internal.utils.requirement_check import require_installed - -require_installed("transformers") - -from transformers.integrations import NeptuneCallback # noqa: F401,F403,E402 diff --git a/src/neptune/integrations/utils.py b/src/neptune/integrations/utils.py deleted file mode 100644 index 589d9a23a..000000000 --- a/src/neptune/integrations/utils.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["join_paths", "verify_type", "RunType"] - -from typing import Union - -from neptune import Run -from neptune.handler import Handler -from neptune.internal.utils import verify_type -from neptune.internal.utils.paths import join_paths - -RunType = Union[Run, Handler] diff --git a/src/neptune/integrations/xgboost/__init__.py b/src/neptune/integrations/xgboost/__init__.py deleted file mode 100644 index f4c6c3b0b..000000000 --- a/src/neptune/integrations/xgboost/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from neptune.internal.utils.requirement_check import require_installed - -require_installed("neptune-xgboost", suggestion="xgboost") - -from neptune_xgboost.impl import * # noqa: F401,F403,E402 diff --git a/src/neptune/internal/__init__.py b/src/neptune/internal/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/artifacts/__init__.py b/src/neptune/internal/artifacts/__init__.py deleted file mode 100644 index 14617c961..000000000 --- a/src/neptune/internal/artifacts/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "LocalArtifactDriver", - "S3ArtifactDriver", -] - -from neptune.internal.artifacts.drivers import ( - LocalArtifactDriver, - S3ArtifactDriver, -) diff --git a/src/neptune/internal/artifacts/drivers/__init__.py b/src/neptune/internal/artifacts/drivers/__init__.py deleted file mode 100644 index dcdf2934b..000000000 --- a/src/neptune/internal/artifacts/drivers/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["LocalArtifactDriver", "S3ArtifactDriver"] - -from neptune.internal.artifacts.drivers.local import LocalArtifactDriver -from neptune.internal.artifacts.drivers.s3 import S3ArtifactDriver diff --git a/src/neptune/internal/artifacts/drivers/local.py b/src/neptune/internal/artifacts/drivers/local.py deleted file mode 100644 index 5e9af0582..000000000 --- a/src/neptune/internal/artifacts/drivers/local.py +++ /dev/null @@ -1,116 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["LocalArtifactDriver"] - -import os -import pathlib -import typing -from datetime import datetime -from urllib.parse import urlparse - -from neptune.exceptions import ( - NeptuneLocalStorageAccessException, - NeptuneUnsupportedArtifactFunctionalityException, -) -from neptune.internal.artifacts.file_hasher import FileHasher -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactFileData, - ArtifactFileType, -) - - -class LocalArtifactDriver(ArtifactDriver): - DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" - - @staticmethod - def get_type() -> str: - return ArtifactFileType.LOCAL.value - - @classmethod - def matches(cls, path: str) -> bool: - return urlparse(path).scheme in ("file", "") - - @classmethod - def _serialize_metadata(cls, metadata: typing.Dict[str, typing.Any]) -> typing.Dict[str, str]: - return { - "file_path": metadata["file_path"], - "last_modified": datetime.fromtimestamp(metadata["last_modified"]).strftime(cls.DATETIME_FORMAT), - } - - @classmethod - def _deserialize_metadata(cls, metadata: typing.Dict[str, str]) -> typing.Dict[str, typing.Any]: - return { - "file_path": metadata["file_path"], - "last_modified": datetime.strptime(metadata["last_modified"], cls.DATETIME_FORMAT), - } - - @classmethod - def get_tracked_files(cls, path: str, destination: str = None) -> typing.List[ArtifactFileData]: - file_protocol_prefix = "file://" - if path.startswith(file_protocol_prefix): - path = path[len(file_protocol_prefix) :] - - if "*" in path: - raise NeptuneUnsupportedArtifactFunctionalityException( - f"Wildcard characters (*,?) in location URI ({path}) are not supported." 
- ) - - source_location = pathlib.Path(path).expanduser() - - stored_files: typing.List[ArtifactFileData] = list() - - files_to_check = source_location.rglob("*") if source_location.is_dir() else [source_location] - for file in files_to_check: - # symlink dirs are omitted by rglob('*') - if not file.is_file(): - continue - - if source_location.is_dir(): - file_path = file.relative_to(source_location).as_posix() - else: - file_path = file.name - file_path = file_path if destination is None else (pathlib.Path(destination) / file_path).as_posix() - - stored_files.append( - ArtifactFileData( - file_path=file_path, - file_hash=FileHasher.get_local_file_hash(file), - type=ArtifactFileType.LOCAL.value, - size=file.stat().st_size, - metadata=cls._serialize_metadata( - { - "file_path": f"file://{file.resolve().as_posix()}", - "last_modified": file.stat().st_mtime, - } - ), - ) - ) - - return stored_files - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - parsed_path = urlparse(file_definition.metadata.get("file_path")) - absolute_path = pathlib.Path(parsed_path.netloc + parsed_path.path) - - if not absolute_path.is_file(): - raise NeptuneLocalStorageAccessException(path=absolute_path, expected_description="an existing file") - - os.makedirs(str(destination.parent), exist_ok=True) - if destination.exists(): - os.remove(destination) - destination.symlink_to(absolute_path) diff --git a/src/neptune/internal/artifacts/drivers/s3.py b/src/neptune/internal/artifacts/drivers/s3.py deleted file mode 100644 index d5553e1c4..000000000 --- a/src/neptune/internal/artifacts/drivers/s3.py +++ /dev/null @@ -1,132 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["S3ArtifactDriver"] - -import pathlib -import typing -from datetime import datetime -from urllib.parse import urlparse - -from neptune.exceptions import ( - NeptuneRemoteStorageAccessException, - NeptuneRemoteStorageCredentialsException, - NeptuneUnsupportedArtifactFunctionalityException, -) -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactFileData, - ArtifactFileType, -) -from neptune.internal.utils.s3 import get_boto_s3_client - - -class S3ArtifactDriver(ArtifactDriver): - DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" - - @staticmethod - def get_type() -> str: - return ArtifactFileType.S3.value - - @classmethod - def matches(cls, path: str) -> bool: - return urlparse(path).scheme == "s3" - - @classmethod - def _serialize_metadata(cls, metadata: typing.Dict[str, typing.Any]) -> typing.Dict[str, str]: - return { - "location": metadata["location"], - "last_modified": metadata["last_modified"].strftime(cls.DATETIME_FORMAT), - } - - @classmethod - def _deserialize_metadata(cls, metadata: typing.Dict[str, str]) -> typing.Dict[str, typing.Any]: - return { - "location": metadata["location"], - "last_modified": datetime.strptime(metadata["last_modified"], cls.DATETIME_FORMAT), - } - - @classmethod - def get_tracked_files(cls, path: str, destination: str = None) -> typing.List[ArtifactFileData]: - url = urlparse(path) - bucket_name, prefix = url.netloc, url.path.lstrip("/") - - if "*" in prefix: - raise NeptuneUnsupportedArtifactFunctionalityException( - f"Wildcard characters (*,?) in location URI ({path}) are not supported." - ) - - remote_storage = get_boto_s3_client().Bucket(bucket_name) - - stored_files: typing.List[ArtifactFileData] = list() - - from botocore.exceptions import NoCredentialsError - - try: - for remote_object in remote_storage.objects.filter(Prefix=prefix): - # If prefix is path to file get only directories - if prefix == remote_object.key: - prefix = str(pathlib.PurePosixPath(prefix).parent) - - remote_key = remote_object.key - destination = pathlib.PurePosixPath(destination or "") - relative_file_path = remote_key[len(prefix.lstrip(".")) :].lstrip("/") - - file_path = destination / relative_file_path - - stored_files.append( - ArtifactFileData( - file_path=str(file_path).lstrip("/"), - file_hash=remote_object.e_tag.strip('"'), - type=ArtifactFileType.S3.value, - size=remote_object.size, - metadata=cls._serialize_metadata( - { - "location": f's3://{bucket_name}/{remote_key.lstrip("/")}', - "last_modified": remote_object.last_modified, - } - ), - ) - ) - except NoCredentialsError: - raise NeptuneRemoteStorageCredentialsException() - except ( - remote_storage.meta.client.exceptions.NoSuchBucket, - remote_storage.meta.client.exceptions.NoSuchKey, - ): - raise NeptuneRemoteStorageAccessException(location=path) - - return stored_files - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - location = file_definition.metadata.get("location") - url = urlparse(location) - bucket_name, path = url.netloc, url.path.lstrip("/") - - remote_storage = get_boto_s3_client() - - from botocore.exceptions import NoCredentialsError - - try: - bucket = remote_storage.Bucket(bucket_name) - bucket.download_file(path, str(destination)) - except NoCredentialsError: - raise NeptuneRemoteStorageCredentialsException() - except ( - remote_storage.meta.client.exceptions.NoSuchBucket, - remote_storage.meta.client.exceptions.NoSuchKey, - ): - raise NeptuneRemoteStorageAccessException(location=location) diff --git 
a/src/neptune/internal/artifacts/file_hasher.py b/src/neptune/internal/artifacts/file_hasher.py deleted file mode 100644 index 04b4fec39..000000000 --- a/src/neptune/internal/artifacts/file_hasher.py +++ /dev/null @@ -1,112 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["FileHasher"] - -import datetime -import hashlib -import typing -from pathlib import Path - -from neptune.internal.artifacts.local_file_hash_storage import LocalFileHashStorage -from neptune.internal.artifacts.types import ( - ArtifactFileData, - ArtifactMetadataSerializer, -) -from neptune.internal.artifacts.utils import sha1 - - -class FileHasher: - ENCODING = "UTF-8" - HASH_ELEMENT_DIVISOR = b"#" - META_ELEMENT_DIVISOR = b"|" - SERVER_INT_BYTES = 4 - SERVER_LONG_BYTES = 8 - SERVER_BYTE_ORDER = "big" - HASH_LENGTH = 64 # sha-256 - - @classmethod - def get_local_file_hash(cls, file_path: typing.Union[str, Path]) -> str: - local_storage = LocalFileHashStorage() - - absolute = Path(file_path).resolve() - modification_date = datetime.datetime.fromtimestamp(absolute.stat().st_mtime).strftime("%Y%m%d_%H%M%S%f") - - stored_file_hash = local_storage.fetch_one(absolute) - - if stored_file_hash: - if stored_file_hash.modification_date >= modification_date: - computed_hash = stored_file_hash.file_hash - else: - computed_hash = sha1(absolute) - local_storage.update(absolute, computed_hash, modification_date) - else: - computed_hash = sha1(absolute) - local_storage.insert(absolute, computed_hash, modification_date) - - local_storage.close() - - return computed_hash - - @classmethod - def _number_to_bytes(cls, int_value: int, bytes_cnt): - return int_value.to_bytes(bytes_cnt, cls.SERVER_BYTE_ORDER) - - @classmethod - def get_artifact_hash(cls, artifact_files: typing.Iterable[ArtifactFileData]) -> str: - artifact_hash = hashlib.sha256() - - for artifact_file in sorted(artifact_files, key=lambda file: file.file_path): - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(artifact_file.file_path), cls.SERVER_INT_BYTES)) - artifact_hash.update(artifact_file.file_path.encode(cls.ENCODING)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(artifact_file.file_hash.encode(cls.ENCODING)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - if artifact_file.size is not None: - artifact_hash.update(cls._number_to_bytes(artifact_file.size, cls.SERVER_LONG_BYTES)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(artifact_file.type), cls.SERVER_INT_BYTES)) - artifact_hash.update(artifact_file.type.encode(cls.ENCODING)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - for metadata_key_value in ArtifactMetadataSerializer.serialize(artifact_file.metadata): - metadata_name, metadata_value = metadata_key_value.get("key"), metadata_key_value.get("value") - artifact_hash.update(cls.META_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(metadata_name), 
cls.SERVER_INT_BYTES)) - artifact_hash.update(metadata_name.encode(cls.ENCODING)) - artifact_hash.update(cls.META_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(metadata_value), cls.SERVER_INT_BYTES)) - artifact_hash.update(metadata_value.encode(cls.ENCODING)) - - return str(artifact_hash.hexdigest()) - - @classmethod - def get_artifact_hash_without_metadata(cls, artifact_files: typing.Iterable[ArtifactFileData]) -> str: - artifact_hash = hashlib.sha256() - - for artifact_file in sorted(artifact_files, key=lambda file: file.file_path): - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(artifact_file.file_path), cls.SERVER_INT_BYTES)) - artifact_hash.update(artifact_file.file_path.encode(cls.ENCODING)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(artifact_file.file_hash.encode(cls.ENCODING)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - if artifact_file.size is not None: - artifact_hash.update(cls._number_to_bytes(artifact_file.size, cls.SERVER_LONG_BYTES)) - artifact_hash.update(cls.HASH_ELEMENT_DIVISOR) - artifact_hash.update(cls._number_to_bytes(len(artifact_file.type), cls.SERVER_INT_BYTES)) - artifact_hash.update(artifact_file.type.encode(cls.ENCODING)) - - return str(artifact_hash.hexdigest()) diff --git a/src/neptune/internal/artifacts/local_file_hash_storage.py b/src/neptune/internal/artifacts/local_file_hash_storage.py deleted file mode 100644 index c8e23f422..000000000 --- a/src/neptune/internal/artifacts/local_file_hash_storage.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
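The hash composition above is deliberately canonical so any client reproduces the same digest: files are sorted by path, and every field is length-prefixed with fixed-width big-endian integers and separated by a divisor byte before being folded into SHA-256. A reduced sketch covering just path, content hash, and size (the removed code also folds in the type and serialized metadata):

import hashlib
from typing import Iterable, Tuple


def artifact_hash(files: Iterable[Tuple[str, str, int]]) -> str:
    digest = hashlib.sha256()
    for path, file_hash, size in sorted(files):  # sorted by file path for stability
        digest.update(b"#")
        digest.update(len(path).to_bytes(4, "big"))  # 4-byte length prefix
        digest.update(path.encode("UTF-8"))
        digest.update(b"#")
        digest.update(file_hash.encode("UTF-8"))
        digest.update(b"#")
        digest.update(size.to_bytes(8, "big"))  # 8-byte size field
    return digest.hexdigest()


print(artifact_hash([("data/train.csv", "ab12", 1024), ("data/test.csv", "cd34", 512)]))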
-# -__all__ = ["LocalFileHashStorage"] - -import os -import sqlite3 as sql -from dataclasses import dataclass -from pathlib import Path - - -class LocalFileHashStorage: - @dataclass - class LocalFileHash: - file_path: str - file_hash: str - modification_date: str - - def __init__(self): - db_path = Path.home() / ".neptune" / "files.db" - os.makedirs(db_path.parent, exist_ok=True) - - self.session = sql.connect(str(db_path)) - self.cursor: sql.Cursor = self.session.cursor() - self.cursor.execute( - "CREATE TABLE IF NOT EXISTS local_file_hashes (file_path text, file_hash text, modification_date text)" - ) - self.session.commit() - - def insert(self, path: Path, computed_hash: str, modification_date: str): - self.cursor.execute( - "INSERT INTO local_file_hashes (file_path, file_hash, modification_date) VALUES (?, ?, ?)", - (str(path), computed_hash, modification_date), - ) - self.session.commit() - - def fetch_one(self, path: Path) -> "LocalFileHash": - found = [ - LocalFileHashStorage.LocalFileHash(*row) - for row in self.cursor.execute( - "SELECT file_path, file_hash, modification_date FROM local_file_hashes WHERE file_path = ?", - (str(path),), - ) - ] - - return found[0] if found is not None and len(found) > 0 else None - - def update(self, path: Path, computed_hash: str, modification_date: str): - self.cursor.execute( - "UPDATE local_file_hashes SET file_hash=?, modification_date=? WHERE file_path = ?", - (computed_hash, modification_date, str(path)), - ) - self.session.commit() - - def close(self) -> None: - self.session.close() diff --git a/src/neptune/internal/artifacts/types.py b/src/neptune/internal/artifacts/types.py deleted file mode 100644 index 273c78af1..000000000 --- a/src/neptune/internal/artifacts/types.py +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["ArtifactFileType", "ArtifactMetadataSerializer", "ArtifactFileData", "ArtifactDriversMap", "ArtifactDriver"] - -import abc -import enum -import pathlib -import typing -from dataclasses import dataclass - -from neptune.exceptions import ( - NeptuneUnhandledArtifactSchemeException, - NeptuneUnhandledArtifactTypeException, -) - - -class ArtifactFileType(enum.Enum): - S3 = "S3" - LOCAL = "Local" - - -class ArtifactMetadataSerializer: - @staticmethod - def serialize(metadata: typing.Dict[str, str]) -> typing.List[typing.Dict[str, str]]: - return [{"key": k, "value": v} for k, v in sorted(metadata.items())] - - @staticmethod - def deserialize(metadata: typing.List[typing.Dict[str, str]]) -> typing.Dict[str, str]: - return {f'{key_value.get("key")}': f'{key_value.get("value")}' for key_value in metadata} - - -@dataclass -class ArtifactFileData: - file_path: str - file_hash: str - type: str - metadata: typing.Dict[str, str] - size: int = None - - @classmethod - def from_dto(cls, artifact_file_dto): - return cls( - file_path=artifact_file_dto.filePath, - file_hash=artifact_file_dto.fileHash, - type=artifact_file_dto.type, - size=artifact_file_dto.size, - metadata=ArtifactMetadataSerializer.deserialize( - [{"key": str(m.key), "value": str(m.value)} for m in artifact_file_dto.metadata] - ), - ) - - def to_dto(self) -> typing.Dict: - return { - "filePath": self.file_path, - "fileHash": self.file_hash, - "type": self.type, - "size": self.size, - "metadata": ArtifactMetadataSerializer.serialize(self.metadata), - } - - -class ArtifactDriversMap: - _implementations: typing.List[typing.Type["ArtifactDriver"]] = [] - - @classmethod - def match_path(cls, path: str) -> typing.Type["ArtifactDriver"]: - for artifact_driver in cls._implementations: - if artifact_driver.matches(path): - return artifact_driver - - raise NeptuneUnhandledArtifactSchemeException(path) - - @classmethod - def match_type(cls, type_str: str) -> typing.Type["ArtifactDriver"]: - for artifact_driver in cls._implementations: - if artifact_driver.get_type() == type_str: - return artifact_driver - - raise NeptuneUnhandledArtifactTypeException(type_str) - - -class ArtifactDriver(abc.ABC): - def __init_subclass__(cls): - ArtifactDriversMap._implementations.append(cls) - - @staticmethod - def get_type() -> str: - raise NotImplementedError - - @classmethod - def matches(cls, path: str) -> bool: - raise NotImplementedError - - @classmethod - def get_tracked_files(cls, path: str, destination: str = None) -> typing.List[ArtifactFileData]: - raise NotImplementedError - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - raise NotImplementedError diff --git a/src/neptune/internal/artifacts/utils.py b/src/neptune/internal/artifacts/utils.py deleted file mode 100644 index 41db6db23..000000000 --- a/src/neptune/internal/artifacts/utils.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["sha1"] - -import hashlib -import pathlib -import typing - - -def sha1(fname: typing.Union[str, pathlib.Path], block_size: int = 2**16) -> str: - sha1sum = hashlib.sha1() - - with open(fname, "rb") as source: - block = source.read(block_size) - - while len(block) != 0: - sha1sum.update(block) - block = source.read(block_size) - - return str(sha1sum.hexdigest()) diff --git a/src/neptune/internal/backends/__init__.py b/src/neptune/internal/backends/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/backends/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/backends/api_model.py b/src/neptune/internal/backends/api_model.py deleted file mode 100644 index fcdbaf44a..000000000 --- a/src/neptune/internal/backends/api_model.py +++ /dev/null @@ -1,182 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = [ - "Project", - "Workspace", - "ApiExperiment", - "OptionalFeatures", - "VersionInfo", - "ClientConfig", - "ArtifactModel", - "MultipartConfig", -] - -from dataclasses import dataclass -from typing import ( - FrozenSet, - Optional, -) - -from packaging import version - -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import ( - SysId, - UniqueId, -) - - -@dataclass(frozen=True) -class MultipartConfig: - min_chunk_size: int - max_chunk_size: int - max_chunk_count: int - max_single_part_size: int - - @staticmethod - def get_default() -> "MultipartConfig": - return MultipartConfig( - min_chunk_size=5242880, - max_chunk_size=1073741824, - max_chunk_count=1000, - max_single_part_size=5242880, - ) - - -@dataclass -class Project: - id: UniqueId - name: str - workspace: str - sys_id: SysId - - -@dataclass -class Workspace: - id: UniqueId - name: str - - -@dataclass -class ApiExperiment: - id: UniqueId - type: ContainerType - sys_id: SysId - workspace: str - project_name: str - trashed: bool = False - - @classmethod - def from_experiment(cls, response_exp): - return cls( - id=response_exp.id, - type=ContainerType.from_api(response_exp.type), - sys_id=response_exp.shortId, - workspace=response_exp.organizationName, - project_name=response_exp.projectName, - trashed=response_exp.trashed, - ) - - -class OptionalFeatures: - VERSION_INFO = "version_info" - ARTIFACTS = "artifacts" - ARTIFACTS_HASH_EXCLUDE_METADATA = "artifacts_hash_exclude_metadata" - ARTIFACTS_EXCLUDE_DIRECTORY_FILES = "artifact_exclude_directory_files" - MULTIPART_UPLOAD = "multipart_upload" - - -@dataclass(frozen=True) -class VersionInfo: - min_recommended: Optional[version.Version] - min_compatible: Optional[version.Version] - max_compatible: Optional[version.Version] - - @staticmethod - def build( - min_recommended: Optional[str], - min_compatible: Optional[str], - max_compatible: Optional[str], - ) -> "VersionInfo": - return VersionInfo( - min_recommended=version.parse(min_recommended) if min_recommended else None, - min_compatible=version.parse(min_compatible) if min_compatible else None, - max_compatible=version.parse(max_compatible) if max_compatible else None, - ) - - -@dataclass(frozen=True) -class ClientConfig: - api_url: str - display_url: str - _missing_features: FrozenSet[str] - version_info: VersionInfo - multipart_config: MultipartConfig - - def has_feature(self, feature_name: str) -> bool: - return feature_name not in self._missing_features - - @staticmethod - def from_api_response(config) -> "ClientConfig": - missing_features = [] - - version_info_obj = getattr(config, "pyLibVersions", None) - if version_info_obj is None: - missing_features.append(OptionalFeatures.VERSION_INFO) - min_recommended = min_compatible = max_compatible = None - else: - min_recommended = getattr(version_info_obj, "minRecommendedVersion", None) - min_compatible = getattr(version_info_obj, "minCompatibleVersion", None) - max_compatible = getattr(version_info_obj, "maxCompatibleVersion", None) - - multipart_upload_config_obj = getattr(config, "multiPartUpload", None) - has_multipart_upload = getattr(multipart_upload_config_obj, "enabled", False) - if not has_multipart_upload: - missing_features.append(OptionalFeatures.MULTIPART_UPLOAD) - multipart_upload_config = None - else: - min_chunk_size = getattr(multipart_upload_config_obj, "minChunkSize") - max_chunk_size = getattr(multipart_upload_config_obj, "maxChunkSize") - max_chunk_count = getattr(multipart_upload_config_obj, 
"maxChunkCount") - max_single_part_size = getattr(multipart_upload_config_obj, "maxSinglePartSize") - multipart_upload_config = MultipartConfig( - min_chunk_size, max_chunk_size, max_chunk_count, max_single_part_size - ) - - artifacts_config_obj = getattr(config, "artifacts", None) - has_artifacts = getattr(artifacts_config_obj, "enabled", False) - if not has_artifacts: - missing_features.append(OptionalFeatures.ARTIFACTS) - - artifacts_api_version = getattr(artifacts_config_obj, "apiVersion", 1) - if artifacts_api_version == 1: - missing_features.append(OptionalFeatures.ARTIFACTS_HASH_EXCLUDE_METADATA) - missing_features.append(OptionalFeatures.ARTIFACTS_EXCLUDE_DIRECTORY_FILES) - - return ClientConfig( - api_url=config.apiUrl, - display_url=config.applicationUrl, - _missing_features=frozenset(missing_features), - version_info=VersionInfo.build(min_recommended, min_compatible, max_compatible), - multipart_config=multipart_upload_config, - ) - - -@dataclass -class ArtifactModel: - received_metadata: bool - hash: str - size: int diff --git a/src/neptune/internal/backends/factory.py b/src/neptune/internal/backends/factory.py deleted file mode 100644 index f92cbb413..000000000 --- a/src/neptune/internal/backends/factory.py +++ /dev/null @@ -1,41 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["get_backend"] - -from typing import Optional - -from neptune.internal.credentials import Credentials -from neptune.types.mode import Mode - -from .hosted_neptune_backend import HostedNeptuneBackend -from .neptune_backend import NeptuneBackend -from .neptune_backend_mock import NeptuneBackendMock -from .offline_neptune_backend import OfflineNeptuneBackend - - -def get_backend(mode: Mode, api_token: Optional[str] = None, proxies: Optional[dict] = None) -> NeptuneBackend: - if mode == Mode.ASYNC: - return HostedNeptuneBackend(credentials=Credentials.from_token(api_token=api_token), proxies=proxies) - elif mode == Mode.SYNC: - return HostedNeptuneBackend(credentials=Credentials.from_token(api_token=api_token), proxies=proxies) - elif mode == Mode.DEBUG: - return NeptuneBackendMock() - elif mode == Mode.OFFLINE: - return OfflineNeptuneBackend() - elif mode == Mode.READ_ONLY: - return HostedNeptuneBackend(credentials=Credentials.from_token(api_token=api_token), proxies=proxies) - else: - raise ValueError(f"mode should be one of {[m for m in Mode]}") diff --git a/src/neptune/internal/backends/hosted_artifact_operations.py b/src/neptune/internal/backends/hosted_artifact_operations.py deleted file mode 100644 index ec722b5e2..000000000 --- a/src/neptune/internal/backends/hosted_artifact_operations.py +++ /dev/null @@ -1,300 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "track_to_new_artifact", - "track_to_existing_artifact", - "get_artifact_attribute", - "list_artifact_files", -] - -from typing import ( - Dict, - List, - Optional, - Tuple, - Type, -) - -from bravado.exception import HTTPNotFound - -from neptune.api.models import ArtifactField -from neptune.exceptions import ( - ArtifactNotFoundException, - ArtifactUploadingError, - FetchAttributeNotFoundException, - NeptuneEmptyLocationException, -) -from neptune.internal.artifacts.file_hasher import FileHasher -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactDriversMap, - ArtifactFileData, -) -from neptune.internal.backends.api_model import ArtifactModel -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper -from neptune.internal.backends.utils import with_api_exceptions_handler -from neptune.internal.operation import ( - AssignArtifact, - Operation, -) -from neptune.internal.utils.paths import path_to_str - - -def _compute_artifact_size(artifact_file_list: List[ArtifactFileData]): - artifact_size = 0 - for artifact_file in artifact_file_list: - if artifact_file.size is None: - # whole artifact's size is undefined in this case - return None - artifact_size += artifact_file.size - return artifact_size - - -def _filter_empty_directory_files(files: List[ArtifactFileData]) -> List[ArtifactFileData]: - return list(filter(lambda x: not _is_s3_empty_directory_file(x), files)) - - -def _is_s3_empty_directory_file(file: ArtifactFileData) -> bool: - return file.type == "S3" and file.size == 0 - - -def track_to_new_artifact( - swagger_client: SwaggerClientWrapper, - project_id: str, - path: List[str], - parent_identifier: str, - entries: List[Tuple[str, Optional[str]]], - default_request_params: Dict, - exclude_directory_files: bool, - exclude_metadata_from_hash: bool, -) -> Optional[Operation]: - files: List[ArtifactFileData] = _extract_file_list(path, entries) - - if exclude_directory_files: - files = _filter_empty_directory_files(files) - - if not files: - raise ArtifactUploadingError("Uploading an empty Artifact") - - artifact_hash = ( - _compute_artifact_hash_without_metadata(files) if exclude_metadata_from_hash else _compute_artifact_hash(files) - ) - - artifact = create_new_artifact( - swagger_client=swagger_client, - project_id=project_id, - artifact_hash=artifact_hash, - parent_identifier=parent_identifier, - size=_compute_artifact_size(files), - default_request_params=default_request_params, - ) - - if not artifact.received_metadata: - upload_artifact_files_metadata( - swagger_client=swagger_client, - project_id=project_id, - artifact_hash=artifact_hash, - files=files, - default_request_params=default_request_params, - ) - - return AssignArtifact(path=path, hash=artifact_hash) - - -def track_to_existing_artifact( - swagger_client: SwaggerClientWrapper, - project_id: str, - path: List[str], - artifact_hash: str, - parent_identifier: str, - entries: List[Tuple[str, Optional[str]]], - default_request_params: Dict, - exclude_directory_files: bool, -) -> Optional[Operation]: - files: List[ArtifactFileData] = 
_extract_file_list(path, entries) - - if exclude_directory_files: - files = _filter_empty_directory_files(files) - - if not files: - raise ArtifactUploadingError("Uploading an empty Artifact") - - artifact = create_artifact_version( - swagger_client=swagger_client, - project_id=project_id, - artifact_hash=artifact_hash, - parent_identifier=parent_identifier, - files=files, - default_request_params=default_request_params, - ) - - return AssignArtifact(path=path, hash=artifact.hash) - - -def _compute_artifact_hash_without_metadata(files: List[ArtifactFileData]) -> str: - return FileHasher.get_artifact_hash_without_metadata(files) - - -def _compute_artifact_hash(files: List[ArtifactFileData]) -> str: - return FileHasher.get_artifact_hash(files) - - -def _extract_file_list(path: List[str], entries: List[Tuple[str, Optional[str]]]) -> List[ArtifactFileData]: - files: List[ArtifactFileData] = list() - - for entry_path, entry_destination in entries: - driver: Type[ArtifactDriver] = ArtifactDriversMap.match_path(entry_path) - artifact_files = driver.get_tracked_files(path=entry_path, destination=entry_destination) - - if len(artifact_files) == 0: - raise NeptuneEmptyLocationException(location=entry_path, namespace="/".join(path)) - - files.extend(artifact_files) - - return files - - -@with_api_exceptions_handler -def create_new_artifact( - swagger_client: SwaggerClientWrapper, - project_id: str, - artifact_hash: str, - parent_identifier: str, - size: int, - default_request_params: Dict, -) -> ArtifactModel: - params = { - "projectIdentifier": project_id, - "hash": artifact_hash, - "size": size, - "parentIdentifier": parent_identifier, - **add_artifact_version_to_request_params(default_request_params), - } - try: - result = swagger_client.api.createNewArtifact(**params).response().result - return ArtifactModel( - hash=result.artifactHash, - received_metadata=result.receivedMetadata, - size=result.size, - ) - except HTTPNotFound: - raise ArtifactNotFoundException(artifact_hash) - - -@with_api_exceptions_handler -def upload_artifact_files_metadata( - swagger_client: SwaggerClientWrapper, - project_id: str, - artifact_hash: str, - files: List[ArtifactFileData], - default_request_params: Dict, -) -> ArtifactModel: - params = { - "projectIdentifier": project_id, - "hash": artifact_hash, - "artifactFilesDTO": {"files": [ArtifactFileData.to_dto(a) for a in files]}, - **add_artifact_version_to_request_params(default_request_params), - } - try: - result = swagger_client.api.uploadArtifactFilesMetadata(**params).response().result - return ArtifactModel( - hash=result.artifactHash, - size=result.size, - received_metadata=result.receivedMetadata, - ) - except HTTPNotFound: - raise ArtifactNotFoundException(artifact_hash) - - -@with_api_exceptions_handler -def create_artifact_version( - swagger_client: SwaggerClientWrapper, - project_id: str, - artifact_hash: str, - parent_identifier: str, - files: List[ArtifactFileData], - default_request_params: Dict, -) -> ArtifactModel: - params = { - "projectIdentifier": project_id, - "hash": artifact_hash, - "parentIdentifier": parent_identifier, - "artifactFilesDTO": {"files": [ArtifactFileData.to_dto(a) for a in files]}, - **add_artifact_version_to_request_params(default_request_params), - } - try: - result = swagger_client.api.createArtifactVersion(**params).response().result - return ArtifactModel( - hash=result.artifactHash, - size=result.size, - received_metadata=result.receivedMetadata, - ) - except HTTPNotFound: - raise ArtifactNotFoundException(artifact_hash) - - 
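Two pure helpers at the top of this module do the pre-flight work for both tracking paths: zero-byte S3 "directory marker" objects are filtered out before hashing, and the artifact size is only defined when every file reports one. A worked example with tuples standing in for ArtifactFileData:

from typing import List, Optional, Tuple

File = Tuple[str, Optional[int]]  # (type, size), a stand-in for ArtifactFileData


def filter_empty_directory_files(files: List[File]) -> List[File]:
    # S3 listings include zero-byte "directory marker" objects; drop them.
    return [f for f in files if not (f[0] == "S3" and f[1] == 0)]


def compute_artifact_size(files: List[File]) -> Optional[int]:
    total = 0
    for _, size in files:
        if size is None:
            return None  # one unknown size makes the whole total undefined
        total += size
    return total


files = [("S3", 0), ("S3", 1024), ("Local", None)]
print(filter_empty_directory_files(files))            # [('S3', 1024), ('Local', None)]
print(compute_artifact_size([("S3", 1024), ("Local", 2048)]))  # 3072
print(compute_artifact_size(files))                   # None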
-@with_api_exceptions_handler -def get_artifact_attribute( - swagger_client: SwaggerClientWrapper, - parent_identifier: str, - path: List[str], - default_request_params: Dict, -) -> ArtifactField: - requests_params = add_artifact_version_to_request_params(default_request_params) - params = { - "experimentId": parent_identifier, - "attribute": path_to_str(path), - **requests_params, - } - try: - result = swagger_client.api.getArtifactAttribute(**params).response().result - return ArtifactField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - -@with_api_exceptions_handler -def list_artifact_files( - swagger_client: SwaggerClientWrapper, - project_id: str, - artifact_hash: str, - default_request_params: Dict, -) -> List[ArtifactFileData]: - requests_params = add_artifact_version_to_request_params(default_request_params) - params = { - "projectIdentifier": project_id, - "hash": artifact_hash, - **requests_params, - } - try: - result = swagger_client.api.listArtifactFiles(**params).response().result - return [ArtifactFileData.from_dto(a) for a in result.files] - except HTTPNotFound: - raise ArtifactNotFoundException(artifact_hash) - - -def add_artifact_version_to_request_params(default_request_params: Dict) -> Dict: - current_artifact_version = "2" - - return { - "_request_options": { - **default_request_params["_request_options"], - "headers": { - **default_request_params["_request_options"]["headers"], - "X-Neptune-Artifact-Api-Version": current_artifact_version, - }, - } - } diff --git a/src/neptune/internal/backends/hosted_client.py b/src/neptune/internal/backends/hosted_client.py deleted file mode 100644 index 69c8a4843..000000000 --- a/src/neptune/internal/backends/hosted_client.py +++ /dev/null @@ -1,212 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = [ - "DEFAULT_REQUEST_KWARGS", - "DEFAULT_PROTO_REQUEST_KWARGS", - "create_http_client_with_auth", - "create_backend_client", - "create_leaderboard_client", - "create_artifacts_client", -] - -import os -import platform -from typing import ( - Dict, - Tuple, -) - -import requests -from bravado.http_client import HttpClient -from bravado.requests_client import RequestsClient - -from neptune.envs import NEPTUNE_REQUEST_TIMEOUT -from neptune.exceptions import NeptuneClientUpgradeRequiredError -from neptune.internal.backends.api_model import ClientConfig -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper -from neptune.internal.backends.utils import ( - NeptuneResponseAdapter, - build_operation_url, - cache, - create_swagger_client, - update_session_proxies, - verify_client_version, - verify_host_resolution, - with_api_exceptions_handler, -) -from neptune.internal.credentials import Credentials -from neptune.internal.oauth import NeptuneAuthenticator -from neptune.version import version as neptune_version - -BACKEND_SWAGGER_PATH = "/api/backend/swagger.json" -LEADERBOARD_SWAGGER_PATH = "/api/leaderboard/swagger.json" -ARTIFACTS_SWAGGER_PATH = "/api/artifacts/swagger.json" - -CONNECT_TIMEOUT = 30 # helps detecting internet connection lost -REQUEST_TIMEOUT = int(os.getenv(NEPTUNE_REQUEST_TIMEOUT, "600")) - -DEFAULT_REQUEST_KWARGS = { - "_request_options": { - "connect_timeout": CONNECT_TIMEOUT, - "timeout": REQUEST_TIMEOUT, - "headers": {}, - } -} - -DEFAULT_PROTO_REQUEST_KWARGS = { - "_request_options": { - **DEFAULT_REQUEST_KWARGS["_request_options"], - "headers": { - **DEFAULT_REQUEST_KWARGS["_request_options"]["headers"], - "Accept": "application/x-protobuf,application/json", - "Accept-Encoding": "gzip, deflate, br", - }, - } -} - - -def _close_connections_on_fork(session: requests.Session): - try: - os.register_at_fork(before=session.close, after_in_child=session.close, after_in_parent=session.close) - except AttributeError: - pass - - -# WARNING: Be careful when changing this function. 
It is used in the experimental package -def _set_pool_size(http_client: RequestsClient) -> None: - _ = http_client - - -def create_http_client(ssl_verify: bool, proxies: Dict[str, str]) -> RequestsClient: - http_client = RequestsClient(ssl_verify=ssl_verify, response_adapter_class=NeptuneResponseAdapter) - http_client.session.verify = ssl_verify - - _set_pool_size(http_client) - - _close_connections_on_fork(http_client.session) - - update_session_proxies(http_client.session, proxies) - - user_agent = "neptune-client/{lib_version} ({system}, python {python_version})".format( - lib_version=neptune_version, - system=platform.platform(), - python_version=platform.python_version(), - ) - http_client.session.headers.update({"User-Agent": user_agent}) - - return http_client - - -@cache -def _get_token_client( - credentials: Credentials, - ssl_verify: bool, - proxies: Dict[str, str], - endpoint_url: str = None, -) -> SwaggerClientWrapper: - config_api_url = credentials.api_url_opt or credentials.token_origin_address - if proxies is None: - verify_host_resolution(config_api_url) - - token_http_client = create_http_client(ssl_verify, proxies) - - return SwaggerClientWrapper( - create_swagger_client( - build_operation_url(endpoint_url or config_api_url, BACKEND_SWAGGER_PATH), - token_http_client, - ) - ) - - -@cache -@with_api_exceptions_handler -def get_client_config(credentials: Credentials, ssl_verify: bool, proxies: Dict[str, str]) -> ClientConfig: - backend_client = _get_token_client(credentials=credentials, ssl_verify=ssl_verify, proxies=proxies) - - config = ( - backend_client.api.getClientConfig( - X_Neptune_Api_Token=credentials.api_token, - alpha="true", - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - - client_config = ClientConfig.from_api_response(config) - if not client_config.version_info: - raise NeptuneClientUpgradeRequiredError(neptune_version, max_version="0.4.111") - return client_config - - -@cache -def create_http_client_with_auth( - credentials: Credentials, ssl_verify: bool, proxies: Dict[str, str] -) -> Tuple[RequestsClient, ClientConfig]: - client_config = get_client_config(credentials=credentials, ssl_verify=ssl_verify, proxies=proxies) - - config_api_url = credentials.api_url_opt or credentials.token_origin_address - - verify_client_version(client_config, neptune_version) - - endpoint_url = None - if config_api_url != client_config.api_url: - endpoint_url = build_operation_url(client_config.api_url, BACKEND_SWAGGER_PATH) - - http_client = create_http_client(ssl_verify=ssl_verify, proxies=proxies) - http_client.authenticator = NeptuneAuthenticator( - credentials.api_token, - _get_token_client( - credentials=credentials, - ssl_verify=ssl_verify, - proxies=proxies, - endpoint_url=endpoint_url, - ), - ssl_verify, - proxies, - ) - - return http_client, client_config - - -@cache -def create_backend_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClientWrapper: - return SwaggerClientWrapper( - create_swagger_client( - build_operation_url(client_config.api_url, BACKEND_SWAGGER_PATH), - http_client, - ) - ) - - -@cache -def create_leaderboard_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClientWrapper: - return SwaggerClientWrapper( - create_swagger_client( - build_operation_url(client_config.api_url, LEADERBOARD_SWAGGER_PATH), - http_client, - ) - ) - - -@cache -def create_artifacts_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClientWrapper: - return SwaggerClientWrapper( - 
create_swagger_client( - build_operation_url(client_config.api_url, ARTIFACTS_SWAGGER_PATH), - http_client, - ) - ) diff --git a/src/neptune/internal/backends/hosted_file_operations.py b/src/neptune/internal/backends/hosted_file_operations.py deleted file mode 100644 index 0fee39f0f..000000000 --- a/src/neptune/internal/backends/hosted_file_operations.py +++ /dev/null @@ -1,534 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "upload_file_attribute", - "upload_file_set_attribute", - "get_unique_upload_entries", - "download_file_set_attribute", -] - -import collections -import enum -import json -import os -import time -from contextlib import ExitStack -from io import BytesIO -from typing import ( - AnyStr, - Dict, - Iterable, - List, - Optional, - Set, - Union, -) -from urllib.parse import urlencode - -from bravado.exception import ( - HTTPPaymentRequired, - HTTPUnprocessableEntity, -) -from bravado.requests_client import RequestsClient -from requests import ( - Request, - Response, -) - -from neptune.exceptions import ( - FileUploadError, - MetadataInconsistency, - NeptuneLimitExceedException, -) -from neptune.internal.backends.api_model import MultipartConfig -from neptune.internal.backends.swagger_client_wrapper import ( - ApiMethodWrapper, - SwaggerClientWrapper, -) -from neptune.internal.backends.utils import ( - build_operation_url, - construct_progress_bar, - handle_server_raw_response_messages, - with_api_exceptions_handler, -) -from neptune.internal.exceptions import ( - InternalClientError, - NeptuneException, - UploadedFileChanged, -) -from neptune.internal.hardware.constants import BYTES_IN_ONE_MB -from neptune.internal.storage import ( - AttributeUploadConfiguration, - FileChunk, - FileChunker, - UploadEntry, - compress_to_tar_gz_in_memory, - normalize_file_name, - scan_unique_upload_entries, - split_upload_files, -) -from neptune.internal.utils import ( - get_absolute_paths, - get_common_root, -) -from neptune.internal.utils.logger import get_logger -from neptune.typing import ProgressBarType - -logger = get_logger() -DEFAULT_CHUNK_SIZE = 5 * BYTES_IN_ONE_MB -DEFAULT_UPLOAD_CONFIG = AttributeUploadConfiguration(chunk_size=DEFAULT_CHUNK_SIZE) - - -class FileUploadTarget(enum.Enum): - FILE_ATOM = "file" - FILE_SET = "fileset" - - -def upload_file_attribute( - swagger_client: SwaggerClientWrapper, - container_id: str, - attribute: str, - source: Union[str, bytes], - ext: str, - multipart_config: Optional[MultipartConfig], -) -> List[NeptuneException]: - if isinstance(source, str) and not os.path.isfile(source): - return [FileUploadError(source, "Path not found or is a not a file.")] - - target = attribute - if ext: - target += "." 
+ ext - - try: - upload_entry = UploadEntry(source if isinstance(source, str) else BytesIO(source), target) - _multichunk_upload_with_retry( - upload_entry, - query_params={ - "experimentIdentifier": container_id, - "attribute": attribute, - "ext": ext, - }, - swagger_client=swagger_client, - multipart_config=multipart_config, - target=FileUploadTarget.FILE_ATOM, - ) - except MetadataInconsistency as e: - return [e] - - -def upload_file_set_attribute( - swagger_client: SwaggerClientWrapper, - container_id: str, - attribute: str, - file_globs: Iterable[str], - reset: bool, - multipart_config: Optional[MultipartConfig], -) -> List[NeptuneException]: - unique_upload_entries = get_unique_upload_entries(file_globs) - - try: - upload_configuration = DEFAULT_UPLOAD_CONFIG - for package in split_upload_files( - upload_entries=unique_upload_entries, - upload_configuration=upload_configuration, - ): - if package.is_empty() and not reset: - continue - - uploading_multiple_entries = package.len > 1 - creating_a_single_empty_dir = ( - package.len == 1 and not package.items[0].is_stream() and os.path.isdir(package.items[0].source) - ) - - if uploading_multiple_entries or creating_a_single_empty_dir or package.is_empty(): - data = compress_to_tar_gz_in_memory(upload_entries=package.items) - url = build_operation_url( - swagger_client.swagger_spec.api_url, - swagger_client.api.uploadFileSetAttributeTar.operation.path_name, - ) - result = upload_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=url, - data=data, - headers={"Content-Type": "application/octet-stream"}, - query_params={ - "experimentId": container_id, - "attribute": attribute, - "reset": str(reset), - }, - ) - _attribute_upload_response_handler(result) - else: - upload_entry = package.items[0] - _multichunk_upload_with_retry( - upload_entry, - query_params={ - "experimentIdentifier": container_id, - "attribute": attribute, - "subPath": upload_entry.target_path, - }, - swagger_client=swagger_client, - multipart_config=multipart_config, - target=FileUploadTarget.FILE_SET, - ) - - reset = False - except MetadataInconsistency as e: - if len(e.args) == 1: - return [e] - else: - return [MetadataInconsistency(desc) for desc in e.args] - - -def get_unique_upload_entries(file_globs: Iterable[str]) -> Set[UploadEntry]: - absolute_paths = get_absolute_paths(file_globs) - common_root = get_common_root(absolute_paths) - - upload_entries: List[UploadEntry] = [] - if common_root is not None: - for absolute_path in absolute_paths: - upload_entries.append( - UploadEntry( - absolute_path, - normalize_file_name(os.path.relpath(absolute_path, common_root)), - ) - ) - else: - for absolute_path in absolute_paths: - upload_entries.append(UploadEntry(absolute_path, normalize_file_name(absolute_path))) - - return scan_unique_upload_entries(upload_entries) - - -def _attribute_upload_response_handler(result: bytes) -> None: - try: - parsed = json.loads(result) - except json.JSONDecodeError: - raise InternalClientError("Unexpected response from server: {}".format(result)) - - if isinstance(parsed, type(None)): - # old format with empty optional error - return - if isinstance(parsed, dict): - if "errorDescription" in parsed: - # old format with optional error - raise MetadataInconsistency(parsed["errorDescription"]) - elif "errors" in parsed: - # new format with a list of errors - error_list = parsed["errors"] - if isinstance(error_list, list): - if len(error_list) == 0: - return - try: - raise MetadataInconsistency(*[item["errorDescription"] for 
item in parsed["errors"]]) - except KeyError: - # fall into default InternalClientError - pass - - raise InternalClientError("Unexpected response from server: {}".format(result)) - - -MultipartUrlSet = collections.namedtuple("MultipartUrlSet", ["start_chunked", "finish_chunked", "send_chunk", "single"]) - -MULTIPART_URLS = { - FileUploadTarget.FILE_ATOM: MultipartUrlSet( - "fileAtomMultipartUploadStart", - "fileAtomMultipartUploadFinish", - "fileAtomMultipartUploadPart", - "fileAtomUpload", - ), - FileUploadTarget.FILE_SET: MultipartUrlSet( - "fileSetFileMultipartUploadStart", - "fileSetFileMultipartUploadFinish", - "fileSetFileMultipartUploadPart", - "fileSetFileUpload", - ), -} - - -def _build_multipart_urlset(swagger_client: SwaggerClientWrapper, target: FileUploadTarget) -> MultipartUrlSet: - urlnameset = MULTIPART_URLS[target] - return MultipartUrlSet( - start_chunked=with_api_exceptions_handler(getattr(swagger_client.api, urlnameset.start_chunked)), - finish_chunked=with_api_exceptions_handler(getattr(swagger_client.api, urlnameset.finish_chunked)), - send_chunk=build_operation_url( - swagger_client.swagger_spec.api_url, - getattr(swagger_client.api, urlnameset.send_chunk).operation.path_name, - ), - single=build_operation_url( - swagger_client.swagger_spec.api_url, - getattr(swagger_client.api, urlnameset.single).operation.path_name, - ), - ) - - -def _multichunk_upload_with_retry( - upload_entry: UploadEntry, - swagger_client: SwaggerClientWrapper, - query_params: dict, - multipart_config: Optional[MultipartConfig], - target: FileUploadTarget, -): - urlset = _build_multipart_urlset(swagger_client, target) - while True: - try: - return _multichunk_upload(upload_entry, swagger_client, query_params, multipart_config, urlset) - except UploadedFileChanged as e: - logger.error(str(e)) - - -def _multichunk_upload( - upload_entry: UploadEntry, - swagger_client: SwaggerClientWrapper, - query_params: dict, - multipart_config: Optional[MultipartConfig], - urlset: MultipartUrlSet, -): - if multipart_config is None: - multipart_config = MultipartConfig.get_default() - - file_stream = upload_entry.get_stream() - entry_length = upload_entry.length() - try: - if entry_length <= multipart_config.max_single_part_size: - # single upload - data = file_stream.read() - result = upload_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=urlset.single, - data=data, - query_params=query_params, - ) - _attribute_upload_response_handler(result) - else: - # chunked upload - result = urlset.start_chunked(**query_params, totalLength=entry_length).response().result - if result.errors: - raise MetadataInconsistency([err.errorDescription for err in result.errors]) - - no_ext_query_params = query_params.copy() - if "ext" in no_ext_query_params: - del no_ext_query_params["ext"] - - upload_id = result.uploadId - chunker = FileChunker( - None if upload_entry.is_stream() else upload_entry.source, - file_stream, - entry_length, - multipart_config, - ) - for idx, chunk in enumerate(chunker.generate()): - result = upload_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=urlset.send_chunk, - data=chunk.data, - headers={"X-Range": _build_x_range(chunk, entry_length)}, - query_params={ - "uploadId": upload_id, - "uploadPartIdx": idx, - **no_ext_query_params, - }, - ) - _attribute_upload_response_handler(result) - - result = urlset.finish_chunked(**no_ext_query_params, uploadId=upload_id).response().result - if result.errors: - raise MetadataInconsistency([err.errorDescription for err 
in result.errors]) - return [] - finally: - file_stream.close() - - -def _build_x_range(chunk: FileChunk, total_size: int) -> str: - return "bytes=%d-%d/%d" % ( - chunk.start, - chunk.end - 1, - total_size, - ) - - -@with_api_exceptions_handler -def upload_raw_data( - http_client: RequestsClient, - url: str, - data: AnyStr, - path_params: Optional[Dict[str, str]] = None, - query_params: Optional[Dict[str, str]] = None, - headers: Optional[Dict[str, str]] = None, -): - url = _generate_url(url=url, path_params=path_params, query_params=query_params) - - session = http_client.session - request = http_client.authenticator.apply(Request(method="POST", url=url, data=data, headers=headers)) - response = handle_server_raw_response_messages(session.send(session.prepare_request(request))) - - if response.status_code >= 300: - ApiMethodWrapper.handle_neptune_http_errors(response) - if response.status_code in ( - HTTPUnprocessableEntity.status_code, - HTTPPaymentRequired.status_code, - ): - raise NeptuneLimitExceedException(reason=response.json().get("title", "Unknown reason")) - response.raise_for_status() - - return response.content - - -def download_image_series_element( - swagger_client: SwaggerClientWrapper, - container_id: str, - attribute: str, - index: int, - destination: str, - progress_bar: Optional[ProgressBarType], -): - url = build_operation_url( - swagger_client.swagger_spec.api_url, - swagger_client.api.getImageSeriesValue.operation.path_name, - ) - response = _download_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=url, - headers={}, - query_params={ - "experimentId": container_id, - "attribute": attribute, - "index": index, - }, - ) - _store_response_as_file( - response, - os.path.join( - destination, - "{}.{}".format(index, response.headers["content-type"].split("/")[-1]), - ), - progress_bar=progress_bar, - ) - - -def download_file_attribute( - swagger_client: SwaggerClientWrapper, - container_id: str, - attribute: str, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, -): - url = build_operation_url( - swagger_client.swagger_spec.api_url, - swagger_client.api.downloadAttribute.operation.path_name, - ) - response = _download_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=url, - headers={"Accept": "application/octet-stream"}, - query_params={"experimentId": container_id, "attribute": attribute}, - ) - _store_response_as_file(response, destination, progress_bar) - - -def download_file_set_attribute( - swagger_client: SwaggerClientWrapper, - download_id: str, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, -): - download_url: Optional[str] = _get_download_url(swagger_client, download_id) - next_sleep = 0.5 - while download_url is None: - time.sleep(next_sleep) - next_sleep = min(2 * next_sleep, 5) - download_url = _get_download_url(swagger_client, download_id) - - response = _download_raw_data( - http_client=swagger_client.swagger_spec.http_client, - url=download_url, - headers={"Accept": "application/zip"}, - ) - _store_response_as_file(response, destination, progress_bar) - - -def _get_download_url(swagger_client: SwaggerClientWrapper, download_id: str): - params = {"id": download_id} - download_request = swagger_client.api.getDownloadPrepareRequest(**params).response().result - return download_request.downloadUrl - - -def _store_response_as_file( - response: Response, - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, -) -> 
None: - chunk_size = 1024 * 1024 - - if destination is None: - target_file = _get_content_disposition_filename(response) - elif os.path.isdir(destination): - target_file = os.path.join(destination, _get_content_disposition_filename(response)) - else: - target_file = destination - - if "content-length" in response.headers: - total_size = int(response.headers["content-length"]) - progress_bar = False if total_size < chunk_size else progress_bar # less than one chunk - else: - total_size = 0 - - # TODO: update syntax once py3.10 becomes min supported version (with (x(), y(), z()): ...) - with ExitStack() as stack: - bar = stack.enter_context(construct_progress_bar(progress_bar, "Fetching file...")) - response = stack.enter_context(response) - file_stream = stack.enter_context(open(target_file, "wb")) - - for chunk in response.iter_content(chunk_size=chunk_size): - if chunk: - file_stream.write(chunk) - bar.update(by=len(chunk), total=total_size) - - -def _get_content_disposition_filename(response: Response) -> str: - content_disposition = response.headers["Content-Disposition"] - return content_disposition[content_disposition.rfind("filename=") + 9 :].strip('"') - - -@with_api_exceptions_handler -def _download_raw_data( - http_client: RequestsClient, - url: str, - path_params: Optional[Dict[str, str]] = None, - query_params: Optional[Dict[str, str]] = None, - headers: Optional[Dict[str, str]] = None, -) -> Response: - url = _generate_url(url=url, path_params=path_params, query_params=query_params) - - session = http_client.session - request = http_client.authenticator.apply(Request(method="GET", url=url, headers=headers)) - - response = handle_server_raw_response_messages(session.send(session.prepare_request(request), stream=True)) - - response.raise_for_status() - return response - - -def _generate_url( - url: str, - path_params: Optional[Dict[str, str]] = None, - query_params: Optional[Dict[str, str]] = None, -) -> str: - for key, val in (path_params or dict()).items(): - url = url.replace("{" + key + "}", val) - if query_params: - url = url + "?" + urlencode(list(query_params.items())) - return url diff --git a/src/neptune/internal/backends/hosted_neptune_backend.py b/src/neptune/internal/backends/hosted_neptune_backend.py deleted file mode 100644 index 3135aebb4..000000000 --- a/src/neptune/internal/backends/hosted_neptune_backend.py +++ /dev/null @@ -1,1357 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["HostedNeptuneBackend"] - -import itertools -import os -import re -import typing -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generator, - Iterable, - List, - Optional, - Tuple, - Union, -) - -from bravado.exception import ( - HTTPConflict, - HTTPNotFound, - HTTPPaymentRequired, - HTTPUnprocessableEntity, -) - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - Field, - FieldDefinition, - FieldType, - FileEntry, - FileField, - FloatField, - FloatSeriesField, - FloatSeriesValues, - ImageSeriesValues, - IntField, - LeaderboardEntry, - NextPage, - QueryFieldDefinitionsResult, - QueryFieldsResult, - StringField, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.api.proto.neptune_pb.api.model.attributes_pb2 import ( - ProtoAttributesSearchResultDTO, - ProtoQueryAttributesResultDTO, -) -from neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2 import ProtoAttributesDTO -from neptune.api.proto.neptune_pb.api.model.series_values_pb2 import ProtoFloatSeriesValuesDTO -from neptune.api.searching_entries import iter_over_pages -from neptune.core.components.operation_storage import OperationStorage -from neptune.envs import ( - NEPTUNE_FETCH_TABLE_STEP_SIZE, - NEPTUNE_USE_PROTOCOL_BUFFERS, -) -from neptune.exceptions import ( - AmbiguousProjectName, - ContainerUUIDNotFound, - FetchAttributeNotFoundException, - FileSetNotFound, - MetadataContainerNotFound, - MetadataInconsistency, - NeptuneFeatureNotAvailableException, - NeptuneLimitExceedException, - NeptuneObjectCreationConflict, - ProjectNotFound, - ProjectNotFoundWithSuggestions, -) -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.backends.api_model import ( - ApiExperiment, - OptionalFeatures, - Project, - Workspace, -) -from neptune.internal.backends.hosted_artifact_operations import ( - get_artifact_attribute, - list_artifact_files, - track_to_existing_artifact, - track_to_new_artifact, -) -from neptune.internal.backends.hosted_client import ( - DEFAULT_PROTO_REQUEST_KWARGS, - DEFAULT_REQUEST_KWARGS, - create_artifacts_client, - create_backend_client, - create_http_client_with_auth, - create_leaderboard_client, -) -from neptune.internal.backends.hosted_file_operations import ( - download_file_attribute, - download_file_set_attribute, - download_image_series_element, - upload_file_attribute, - upload_file_set_attribute, -) -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.backends.nql import NQLQuery -from neptune.internal.backends.operation_api_name_visitor import OperationApiNameVisitor -from neptune.internal.backends.operation_api_object_converter import OperationApiObjectConverter -from neptune.internal.backends.operations_preprocessor import OperationsPreprocessor -from neptune.internal.backends.utils import ( - ExecuteOperationsBatchingManager, - MissingApiClient, - build_operation_url, - ssl_verify, - with_api_exceptions_handler, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.credentials import Credentials -from neptune.internal.exceptions import ( - ClientHttpError, - InternalClientError, - NeptuneException, -) -from neptune.internal.id_formats import ( - QualifiedName, - UniqueId, -) -from neptune.internal.operation import ( - DeleteAttribute, - Operation, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) -from neptune.internal.utils import base64_decode -from 
neptune.internal.utils.generic_attribute_mapper import map_attribute_result_to_value -from neptune.internal.utils.git import GitInfo -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import path_to_str -from neptune.internal.utils.patterns import PROJECT_QUALIFIED_NAME_PATTERN -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) -from neptune.internal.websockets.websockets_factory import WebsocketsFactory -from neptune.management.exceptions import ObjectNotFound -from neptune.typing import ProgressBarType -from neptune.version import version as neptune_version - -if TYPE_CHECKING: - from bravado.requests_client import RequestsClient - - from neptune.internal.backends.api_model import ClientConfig - - -_logger = get_logger() - -ATOMIC_ATTRIBUTE_TYPES = { - FieldType.INT.value, - FieldType.FLOAT.value, - FieldType.STRING.value, - FieldType.BOOL.value, - FieldType.DATETIME.value, - FieldType.OBJECT_STATE.value, -} - - -class HostedNeptuneBackend(NeptuneBackend): - def __init__(self, credentials: Credentials, proxies: Optional[Dict[str, str]] = None): - self.credentials = credentials - self.proxies = proxies - self.missing_features = [] - self.use_proto = os.getenv(NEPTUNE_USE_PROTOCOL_BUFFERS, "False").lower() in {"true", "1", "y"} - - http_client, client_config = create_http_client_with_auth( - credentials=credentials, ssl_verify=ssl_verify(), proxies=proxies - ) - self._http_client: "RequestsClient" = http_client - self._client_config: "ClientConfig" = client_config - - self.backend_client = create_backend_client(self._client_config, self._http_client) - self.leaderboard_client = create_leaderboard_client(self._client_config, self._http_client) - - if self._client_config.has_feature(OptionalFeatures.ARTIFACTS): - self.artifacts_client = create_artifacts_client(self._client_config, self._http_client) - else: - # create a stub - self.artifacts_client = MissingApiClient(OptionalFeatures.ARTIFACTS) - - def verify_feature_available(self, feature_name: str): - if not self._client_config.has_feature(feature_name): - raise NeptuneFeatureNotAvailableException(feature_name) - - def get_display_address(self) -> str: - return self._client_config.display_url - - def websockets_factory(self, project_id: str, run_id: str) -> Optional[WebsocketsFactory]: - base_url = re.sub(r"^http", "ws", self._client_config.api_url) - return WebsocketsFactory( - url=build_operation_url(base_url, f"/api/notifications/v1/runs/{project_id}/{run_id}/signal"), - session=self._http_client.authenticator.auth.session, - proxies=self.proxies, - ) - - @with_api_exceptions_handler - def get_project(self, project_id: QualifiedName) -> Project: - project_spec = re.search(PROJECT_QUALIFIED_NAME_PATTERN, project_id) - workspace, name = project_spec["workspace"], project_spec["project"] - - try: - if not workspace: - available_projects = list( - filter( - lambda p: p.name == name, - self.get_available_projects(search_term=name), - ) - ) - - if len(available_projects) == 1: - project = available_projects[0] - project_id = f"{project.workspace}/{project.name}" - elif len(available_projects) > 1: - raise AmbiguousProjectName(project_id=project_id, available_projects=available_projects) - else: - raise ProjectNotFoundWithSuggestions( - project_id=project_id, - available_projects=self.get_available_projects(), - available_workspaces=self.get_available_workspaces(), - ) - - response = self.backend_client.api.getProject( - projectIdentifier=project_id, - **DEFAULT_REQUEST_KWARGS, - 
).response() - project = response.result - - return Project( - id=project.id, - name=project.name, - workspace=project.organizationName, - sys_id=project.projectKey, - ) - except HTTPNotFound: - available_workspaces = self.get_available_workspaces() - - if workspace and not list(filter(lambda aw: aw.name == workspace, available_workspaces)): - # Could not found specified workspace, forces listing all projects - raise ProjectNotFoundWithSuggestions( - project_id=project_id, - available_projects=self.get_available_projects(), - available_workspaces=available_workspaces, - ) - else: - raise ProjectNotFoundWithSuggestions( - project_id=project_id, - available_projects=self.get_available_projects(workspace_id=workspace), - ) - - @with_api_exceptions_handler - def get_available_projects( - self, workspace_id: Optional[str] = None, search_term: Optional[str] = None - ) -> List[Project]: - try: - response = self.backend_client.api.listProjects( - limit=5, - organizationIdentifier=workspace_id, - searchTerm=search_term, - sortBy=["lastViewed"], - sortDirection=["descending"], - userRelation="memberOrHigher", - **DEFAULT_REQUEST_KWARGS, - ).response() - projects = response.result.entries - return list( - map( - lambda project: Project( - id=project.id, - name=project.name, - workspace=project.organizationName, - sys_id=project.projectKey, - ), - projects, - ) - ) - except HTTPNotFound: - return [] - - @with_api_exceptions_handler - def get_available_workspaces(self) -> List[Workspace]: - try: - response = self.backend_client.api.listOrganizations( - **DEFAULT_REQUEST_KWARGS, - ).response() - workspaces = response.result - return list( - map( - lambda workspace: Workspace(id=workspace.id, name=workspace.name), - workspaces, - ) - ) - except HTTPNotFound: - return [] - - @with_api_exceptions_handler - def get_metadata_container( - self, - container_id: Union[UniqueId, QualifiedName], - expected_container_type: typing.Optional[ContainerType], - ) -> ApiExperiment: - try: - experiment = ( - self.leaderboard_client.api.getExperiment( - experimentId=container_id, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - - if ( - expected_container_type is not None - and ContainerType.from_api(experiment.type) != expected_container_type - ): - raise MetadataContainerNotFound.of_container_type( - container_type=expected_container_type, container_id=container_id - ) - - return ApiExperiment.from_experiment(experiment) - except ObjectNotFound: - raise MetadataContainerNotFound.of_container_type( - container_type=expected_container_type, container_id=container_id - ) - - @with_api_exceptions_handler - def create_run( - self, - project_id: UniqueId, - git_info: Optional[GitInfo] = None, - custom_run_id: Optional[str] = None, - notebook_id: Optional[str] = None, - checkpoint_id: Optional[str] = None, - ) -> ApiExperiment: - - git_info_serialized = ( - { - "commit": { - "commitId": git_info.commit_id, - "message": git_info.message, - "authorName": git_info.author_name, - "authorEmail": git_info.author_email, - "commitDate": git_info.commit_date, - }, - "repositoryDirty": git_info.dirty, - "currentBranch": git_info.branch, - "remotes": git_info.remotes, - } - if git_info - else None - ) - - additional_params = { - "gitInfo": git_info_serialized, - "customId": custom_run_id, - } - - if notebook_id is not None and checkpoint_id is not None: - additional_params["notebookId"] = notebook_id if notebook_id is not None else None - additional_params["checkpointId"] = checkpoint_id if checkpoint_id is not None else 
None - - return self._create_experiment( - project_id=project_id, - parent_id=project_id, - container_type=ContainerType.RUN, - additional_params=additional_params, - ) - - @with_api_exceptions_handler - def create_model(self, project_id: UniqueId, key: str = "") -> ApiExperiment: - additional_params = { - "key": key, - } - - return self._create_experiment( - project_id=project_id, - parent_id=project_id, - container_type=ContainerType.MODEL, - additional_params=additional_params, - ) - - @with_api_exceptions_handler - def create_model_version(self, project_id: UniqueId, model_id: UniqueId) -> ApiExperiment: - return self._create_experiment( - project_id=project_id, - parent_id=model_id, - container_type=ContainerType.MODEL_VERSION, - ) - - def _create_experiment( - self, - project_id: UniqueId, - parent_id: UniqueId, - container_type: ContainerType, - additional_params: Optional[dict] = None, - ): - if additional_params is None: - additional_params = dict() - - params = { - "projectIdentifier": project_id, - "parentId": parent_id, - "type": container_type.to_api(), - "cliVersion": str(neptune_version), - **additional_params, - } - - kwargs = { - "experimentCreationParams": params, - "X-Neptune-CliVersion": str(neptune_version), - **DEFAULT_REQUEST_KWARGS, - } - - try: - experiment = self.leaderboard_client.api.createExperiment(**kwargs).response().result - return ApiExperiment.from_experiment(experiment) - except HTTPNotFound: - raise ProjectNotFound(project_id=project_id) - except HTTPConflict as e: - raise NeptuneObjectCreationConflict() from e - - @with_api_exceptions_handler - def create_checkpoint(self, notebook_id: str, jupyter_path: str) -> Optional[str]: - try: - return ( - self.leaderboard_client.api.createEmptyCheckpoint( - notebookId=notebook_id, - checkpoint={"path": jupyter_path}, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result.id - ) - except HTTPNotFound: - return None - - @with_api_exceptions_handler - def ping(self, container_id: str, container_type: ContainerType): - request_kwargs = { - "_request_options": { - "timeout": 10, - "connect_timeout": 10, - } - } - try: - self.leaderboard_client.api.ping( - experimentId=container_id, - **request_kwargs, - ).response().result - except HTTPNotFound as e: - raise ContainerUUIDNotFound(container_id, container_type) from e - - def execute_operations( - self, - container_id: UniqueId, - container_type: ContainerType, - operations: List[Operation], - operation_storage: OperationStorage, - ) -> Tuple[int, List[NeptuneException]]: - errors = [] - - batching_mgr = ExecuteOperationsBatchingManager(self) - operations_batch = batching_mgr.get_batch(operations) - errors.extend(operations_batch.errors) - dropped_count = operations_batch.dropped_operations_count - - operations_preprocessor = OperationsPreprocessor() - operations_preprocessor.process(operations_batch.operations) - - preprocessed_operations = operations_preprocessor.get_operations() - errors.extend(preprocessed_operations.errors) - - if preprocessed_operations.artifact_operations: - self.verify_feature_available(OptionalFeatures.ARTIFACTS) - - # Upload operations should be done first since they are idempotent - errors.extend( - self._execute_upload_operations_with_400_retry( - container_id=container_id, - container_type=container_type, - upload_operations=preprocessed_operations.upload_operations, - operation_storage=operation_storage, - ) - ) - - ( - artifact_operations_errors, - assign_artifact_operations, - ) = self._execute_artifact_operations( - 
container_id=container_id, - container_type=container_type, - artifact_operations=preprocessed_operations.artifact_operations, - ) - - errors.extend(artifact_operations_errors) - - errors.extend( - self._execute_operations( - container_id, - container_type, - operations=assign_artifact_operations + preprocessed_operations.other_operations, - ) - ) - - for op in itertools.chain( - preprocessed_operations.upload_operations, - assign_artifact_operations, - preprocessed_operations.other_operations, - ): - op.clean(operation_storage=operation_storage) - - return ( - operations_preprocessor.processed_ops_count + dropped_count, - errors, - ) - - def _execute_upload_operations( - self, - container_id: str, - container_type: ContainerType, - upload_operations: List[Operation], - operation_storage: OperationStorage, - ) -> List[NeptuneException]: - errors = list() - - if self._client_config.has_feature(OptionalFeatures.MULTIPART_UPLOAD): - multipart_config = self._client_config.multipart_config - # collect delete operations and execute them - attributes_to_reset = [ - DeleteAttribute(op.path) for op in upload_operations if isinstance(op, UploadFileSet) and op.reset - ] - if attributes_to_reset: - errors.extend(self._execute_operations(container_id, container_type, operations=attributes_to_reset)) - else: - multipart_config = None - - for op in upload_operations: - if isinstance(op, UploadFile): - upload_errors = upload_file_attribute( - swagger_client=self.leaderboard_client, - container_id=container_id, - attribute=path_to_str(op.path), - source=op.get_absolute_path(operation_storage), - ext=op.ext, - multipart_config=multipart_config, - ) - if upload_errors: - errors.extend(upload_errors) - elif isinstance(op, UploadFileContent): - upload_errors = upload_file_attribute( - swagger_client=self.leaderboard_client, - container_id=container_id, - attribute=path_to_str(op.path), - source=base64_decode(op.file_content), - ext=op.ext, - multipart_config=multipart_config, - ) - if upload_errors: - errors.extend(upload_errors) - elif isinstance(op, UploadFileSet): - upload_errors = upload_file_set_attribute( - swagger_client=self.leaderboard_client, - container_id=container_id, - attribute=path_to_str(op.path), - file_globs=op.file_globs, - reset=op.reset, - multipart_config=multipart_config, - ) - if upload_errors: - errors.extend(upload_errors) - else: - raise InternalClientError("Upload operation in neither File or FileSet") - - return errors - - def _execute_upload_operations_with_400_retry( - self, - container_id: str, - container_type: ContainerType, - upload_operations: List[Operation], - operation_storage: OperationStorage, - ) -> List[NeptuneException]: - while True: - try: - return self._execute_upload_operations( - container_id, container_type, upload_operations, operation_storage - ) - except ClientHttpError as ex: - if "Length of stream does not match given range" not in ex.response: - raise ex - - @with_api_exceptions_handler - def _execute_artifact_operations( - self, - container_id: str, - container_type: ContainerType, - artifact_operations: List[TrackFilesToArtifact], - ) -> Tuple[List[Optional[NeptuneException]], List[Optional[Operation]]]: - errors = list() - assign_operations = list() - - has_hash_exclude_metadata = self._client_config.has_feature(OptionalFeatures.ARTIFACTS_HASH_EXCLUDE_METADATA) - has_exclude_directories = self._client_config.has_feature(OptionalFeatures.ARTIFACTS_EXCLUDE_DIRECTORY_FILES) - - for op in artifact_operations: - try: - artifact_hash = 
self.get_artifact_attribute(container_id, container_type, op.path).hash - except FetchAttributeNotFoundException: - artifact_hash = None - - try: - if artifact_hash is None: - assign_operation = track_to_new_artifact( - swagger_client=self.artifacts_client, - project_id=op.project_id, - path=op.path, - parent_identifier=container_id, - entries=op.entries, - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=has_exclude_directories, - exclude_metadata_from_hash=has_hash_exclude_metadata, - ) - else: - assign_operation = track_to_existing_artifact( - swagger_client=self.artifacts_client, - project_id=op.project_id, - path=op.path, - artifact_hash=artifact_hash, - parent_identifier=container_id, - entries=op.entries, - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=has_exclude_directories, - ) - - if assign_operation: - assign_operations.append(assign_operation) - except NeptuneException as error: - errors.append(error) - - return errors, assign_operations - - @with_api_exceptions_handler - def _execute_operations( - self, - container_id: UniqueId, - container_type: ContainerType, - operations: List[Operation], - ) -> List[MetadataInconsistency]: - kwargs = { - "experimentId": container_id, - "operations": [ - { - "path": path_to_str(op.path), - OperationApiNameVisitor().visit(op): OperationApiObjectConverter().convert(op), - } - for op in operations - ], - **DEFAULT_REQUEST_KWARGS, - } - - try: - result = self.leaderboard_client.api.executeOperations(**kwargs).response().result - return [MetadataInconsistency(err.errorDescription) for err in result] - except HTTPNotFound as e: - raise ContainerUUIDNotFound(container_id, container_type) from e - except (HTTPPaymentRequired, HTTPUnprocessableEntity) as e: - raise NeptuneLimitExceedException(reason=e.response.json().get("title", "Unknown reason")) from e - - @with_api_exceptions_handler - def get_attributes(self, container_id: str, container_type: ContainerType) -> List[FieldDefinition]: - params = { - "experimentId": container_id, - **DEFAULT_REQUEST_KWARGS, - } - try: - experiment = self.leaderboard_client.api.getExperimentAttributes(**params).response().result - - attribute_type_names = [at.value for at in FieldType] - accepted_attributes = [attr for attr in experiment.attributes if attr.type in attribute_type_names] - - # Notify about ignored attrs - ignored_attributes = set(attr.type for attr in experiment.attributes) - set( - attr.type for attr in accepted_attributes - ) - if ignored_attributes: - _logger.warning( - "Ignored following attributes (unknown type): %s.\n" "Try to upgrade `neptune`.", - ignored_attributes, - ) - - return [ - FieldDefinition.from_model(field) for field in accepted_attributes if field.type in attribute_type_names - ] - except HTTPNotFound as e: - raise ContainerUUIDNotFound( - container_id=container_id, - container_type=container_type, - ) from e - - def download_file_series_by_index( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - index: int, - destination: str, - progress_bar: Optional[ProgressBarType], - ): - try: - download_image_series_element( - swagger_client=self.leaderboard_client, - container_id=container_id, - attribute=path_to_str(path), - index=index, - destination=destination, - progress_bar=progress_bar, - ) - except ClientHttpError as e: - if e.status == HTTPNotFound.status_code: - raise FetchAttributeNotFoundException(path_to_str(path)) - else: - raise - - def download_file( - self, - container_id: str, - 
container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - try: - download_file_attribute( - swagger_client=self.leaderboard_client, - container_id=container_id, - attribute=path_to_str(path), - destination=destination, - progress_bar=progress_bar, - ) - except ClientHttpError as e: - if e.status == HTTPNotFound.status_code: - raise FetchAttributeNotFoundException(path_to_str(path)) - else: - raise - - def download_file_set( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - download_request = self._get_file_set_download_request(container_id, container_type, path) - try: - download_file_set_attribute( - swagger_client=self.leaderboard_client, - download_id=download_request.id, - destination=destination, - progress_bar=progress_bar, - ) - except ClientHttpError as e: - if e.status == HTTPNotFound.status_code: - raise FetchAttributeNotFoundException(path_to_str(path)) - else: - raise - - @with_api_exceptions_handler - def get_float_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FloatField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getFloatAttribute(**params).response().result - return FloatField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_int_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> IntField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getIntAttribute(**params).response().result - return IntField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_bool_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> BoolField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getBoolAttribute(**params).response().result - return BoolField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_file_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FileField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getFileAttribute(**params).response().result - return FileField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_string_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> StringField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getStringAttribute(**params).response().result - return StringField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_datetime_attribute( - self, container_id: str, container_type: 
ContainerType, path: List[str] - ) -> DateTimeField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getDatetimeAttribute(**params).response().result - return DateTimeField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - def get_artifact_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> ArtifactField: - return get_artifact_attribute( - swagger_client=self.leaderboard_client, - parent_identifier=container_id, - path=path, - default_request_params=DEFAULT_REQUEST_KWARGS, - ) - - def list_artifact_files(self, project_id: str, artifact_hash: str) -> List[ArtifactFileData]: - return list_artifact_files( - swagger_client=self.artifacts_client, - project_id=project_id, - artifact_hash=artifact_hash, - default_request_params=DEFAULT_REQUEST_KWARGS, - ) - - @with_api_exceptions_handler - def list_fileset_files(self, attribute: List[str], container_id: str, path: str) -> List[FileEntry]: - attribute = path_to_str(attribute) - try: - entries = ( - self.leaderboard_client.api.lsFileSetAttribute( - attribute=attribute, path=path, experimentId=container_id, **DEFAULT_REQUEST_KWARGS - ) - .response() - .result - ) - return [FileEntry.from_dto(entry) for entry in entries] - except HTTPNotFound: - raise FileSetNotFound(attribute, path) - - @with_api_exceptions_handler - def get_float_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> FloatSeriesField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getFloatSeriesAttribute(**params).response().result - return FloatSeriesField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_string_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSeriesField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getStringSeriesAttribute(**params).response().result - return StringSeriesField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_string_set_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSetField: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getStringSetAttribute(**params).response().result - return StringSetField.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_image_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - offset: int, - limit: int, - ) -> ImageSeriesValues: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - "limit": limit, - "offset": offset, - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getImageSeriesValues(**params).response().result - return ImageSeriesValues.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler 
- def get_string_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - ) -> StringSeriesValues: - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - "limit": limit, - "skipToStep": from_step, - **DEFAULT_REQUEST_KWARGS, - } - try: - result = self.leaderboard_client.api.getStringSeriesValues(**params).response().result - return StringSeriesValues.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def get_float_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - use_proto: Optional[bool] = None, - include_inherited: bool = True, - ) -> FloatSeriesValues: - use_proto = use_proto if use_proto is not None else self.use_proto - - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - "limit": limit, - "skipToStep": from_step, - } - - if not include_inherited: - params["lineage"] = "NONE" - - try: - if use_proto: - result = ( - self.leaderboard_client.api.getFloatSeriesValuesProto( - **params, - **DEFAULT_PROTO_REQUEST_KWARGS, - ) - .response() - .result - ) - data = ProtoFloatSeriesValuesDTO.FromString(result) - return FloatSeriesValues.from_proto(data) - else: - result = ( - self.leaderboard_client.api.getFloatSeriesValues( - **params, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - return FloatSeriesValues.from_model(result) - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def fetch_atom_attribute_values( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> List[Tuple[str, FieldType, Any]]: - params = { - "experimentId": container_id, - } - try: - namespace_prefix = path_to_str(path) - if namespace_prefix: - # don't want to catch "ns/attribute/other" while looking for "ns/attr" - namespace_prefix += "/" - result = self.leaderboard_client.api.getExperimentAttributes(**params).response().result - return [ - (attr.name, attr.type, map_attribute_result_to_value(attr)) - for attr in result.attributes - if attr.name.startswith(namespace_prefix) - ] - except HTTPNotFound as e: - raise ContainerUUIDNotFound(container_id, container_type) from e - - @with_api_exceptions_handler - def _get_file_set_download_request(self, container_id: str, container_type: ContainerType, path: List[str]): - params = { - "experimentId": container_id, - "attribute": path_to_str(path), - **DEFAULT_REQUEST_KWARGS, - } - try: - return self.leaderboard_client.api.prepareForDownloadFileSetAttributeZip(**params).response().result - except HTTPNotFound: - raise FetchAttributeNotFoundException(path_to_str(path)) - - @with_api_exceptions_handler - def _get_column_types(self, project_id: UniqueId, column: str) -> List[Any]: - params = { - "projectIdentifier": project_id, - "query": { - "attributeNameFilter": {"mustMatchRegexes": [column]}, - }, - **DEFAULT_REQUEST_KWARGS, - } - try: - return ( - self.leaderboard_client.api.queryAttributeDefinitionsWithinProject(**params).response().result.entries - ) - except HTTPNotFound as e: - raise ProjectNotFound(project_id=project_id) from e - - @with_api_exceptions_handler - def search_leaderboard_entries( - self, - project_id: UniqueId, - types: Optional[Iterable[ContainerType]] = None, - query: Optional[NQLQuery] = None, - columns: Optional[Iterable[str]] = 
None, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - step_size: Optional[int] = None, - use_proto: Optional[bool] = None, - ) -> Generator[LeaderboardEntry, None, None]: - use_proto = use_proto if use_proto is not None else self.use_proto - default_step_size = step_size or int(os.getenv(NEPTUNE_FETCH_TABLE_STEP_SIZE, "100")) - - step_size = min(default_step_size, limit) if limit else default_step_size - - columns = set(columns) | {sort_by} if columns else {sort_by} - - types_filter = list(map(lambda container_type: container_type.to_api(), types)) if types else None - attributes_filter = {"attributeFilters": [{"path": column} for column in columns]} if columns else {} - - if sort_by == "sys/creation_time": - sort_by_column_type = FieldType.DATETIME.value - elif sort_by == "sys/id": - sort_by_column_type = FieldType.STRING.value - else: - sort_by_column_type_candidates = self._get_column_types(project_id, sort_by) - sort_by_column_type = _get_column_type_from_entries(sort_by_column_type_candidates, sort_by) - - try: - return iter_over_pages( - client=self.leaderboard_client, - project_id=project_id, - types=types_filter, - query=query, - attributes_filter=attributes_filter, - step_size=step_size, - limit=limit, - sort_by=sort_by, - ascending=ascending, - sort_by_column_type=sort_by_column_type, - progress_bar=progress_bar, - use_proto=use_proto, - ) - except HTTPNotFound: - raise ProjectNotFound(project_id) - - def get_run_url(self, run_id: str, workspace: str, project_name: str, sys_id: str) -> str: - base_url = self.get_display_address() - return f"{base_url}/{workspace}/{project_name}/e/{sys_id}" - - def get_project_url(self, project_id: str, workspace: str, project_name: str) -> str: - base_url = self.get_display_address() - return f"{base_url}/{workspace}/{project_name}/" - - def get_model_url(self, model_id: str, workspace: str, project_name: str, sys_id: str) -> str: - base_url = self.get_display_address() - return f"{base_url}/{workspace}/{project_name}/m/{sys_id}" - - def get_model_version_url( - self, - model_version_id: str, - model_id: str, - workspace: str, - project_name: str, - sys_id: str, - ) -> str: - base_url = self.get_display_address() - return f"{base_url}/{workspace}/{project_name}/m/{model_id}/v/{sys_id}" - - def query_fields_definitions_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - experiment_ids_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - ) -> QueryFieldDefinitionsResult: - pagination = {"nextPage": next_page.to_dto()} if next_page else {} - params = { - "projectIdentifier": project_id, - "query": { - **pagination, - "experimentIdsFilter": experiment_ids_filter, - "attributeNameRegex": field_name_regex, - }, - } - - try: - data = ( - self.leaderboard_client.api.queryAttributeDefinitionsWithinProject( - **params, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - return QueryFieldDefinitionsResult.from_model(data) - except HTTPNotFound: - raise ProjectNotFound(project_id=project_id) - - def query_fields_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - field_names_filter: Optional[List[str]] = None, - experiment_ids_filter: Optional[List[str]] = None, - experiment_names_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - use_proto: Optional[bool] = None, - ) -> QueryFieldsResult: - use_proto = 
use_proto if use_proto is not None else self.use_proto - - query = { - "experimentIdsFilter": experiment_ids_filter or None, - "experimentNamesFilter": experiment_names_filter or None, - "nextPage": next_page.to_dto() if next_page else None, - } - - # If we are provided with both explicit column names, and a regex, - # we need to paste together all of them into a single regex (with OR between terms) - if field_name_regex: - terms = [field_name_regex] - - if field_names_filter: - # Make sure we don't pass too broad regex for explicit column names - terms += [f"^{name}$" for name in field_names_filter] - - regex = "|".join(terms) - query["attributeNameFilter"] = {"mustMatchRegexes": [regex]} - elif field_names_filter: - query["attributeNamesFilter"] = field_names_filter - - params = {"projectIdentifier": project_id, "query": query} - - try: - if use_proto: - result = ( - self.leaderboard_client.api.queryAttributesWithinProjectProto( - **params, - **DEFAULT_PROTO_REQUEST_KWARGS, - ) - .response() - .result - ) - data = ProtoQueryAttributesResultDTO.FromString(result) - return QueryFieldsResult.from_proto(data) - else: - data = ( - self.leaderboard_client.api.queryAttributesWithinProject( - **params, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - return QueryFieldsResult.from_model(data) - except HTTPNotFound: - raise ProjectNotFound(project_id=project_id) - - def get_fields_definitions( - self, - container_id: str, - container_type: ContainerType, - use_proto: Optional[bool] = None, - ) -> List[FieldDefinition]: - use_proto = use_proto if use_proto is not None else self.use_proto - - params = { - "experimentIdentifier": container_id, - } - - try: - if use_proto: - result = ( - self.leaderboard_client.api.queryAttributeDefinitionsProto( - **params, - **DEFAULT_PROTO_REQUEST_KWARGS, - ) - .response() - .result - ) - data = ProtoAttributesSearchResultDTO.FromString(result) - return [FieldDefinition.from_proto(field_def) for field_def in data.entries] - else: - data = ( - self.leaderboard_client.api.queryAttributeDefinitions( - **params, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - return [FieldDefinition.from_model(field_def) for field_def in data.entries] - except HTTPNotFound as e: - raise ContainerUUIDNotFound( - container_id=container_id, - container_type=container_type, - ) from e - - def get_fields_with_paths_filter( - self, container_id: str, container_type: ContainerType, paths: List[str], use_proto: Optional[bool] = None - ) -> List[Field]: - use_proto = use_proto if use_proto is not None else self.use_proto - - params = { - "holderIdentifier": container_id, - "holderType": "experiment", - "attributeQuery": { - "attributePathsFilter": paths, - }, - } - - try: - if use_proto: - result = ( - self.leaderboard_client.api.getAttributesWithPathsFilterProto( - **params, - **DEFAULT_PROTO_REQUEST_KWARGS, - ) - .response() - .result - ) - data = ProtoAttributesDTO.FromString(result) - return [Field.from_proto(field) for field in data.attributes] - else: - data = ( - self.leaderboard_client.api.getAttributesWithPathsFilter( - **params, - **DEFAULT_REQUEST_KWARGS, - ) - .response() - .result - ) - return [Field.from_model(field) for field in data.attributes] - except HTTPNotFound as e: - raise ContainerUUIDNotFound( - container_id=container_id, - container_type=container_type, - ) from e - - -def _get_column_type_from_entries(entries: List[Any], column: str) -> str: - if not entries: # column chosen is not present in the table - raise ValueError(f"Column '{column}' 
chosen for sorting is not present in the table") - - if len(entries) == 1 and entries[0].name == column: - return entries[0].type - - types = set() - for entry in entries: - if entry.name != column: # caught by regex, but it's not this column - continue - if entry.type not in ATOMIC_ATTRIBUTE_TYPES: # non-atomic type - no need to look further - raise ValueError( - f"Column {column} used for sorting is a complex type. For more, " - f"see https://docs.neptune.ai/api/field_types/#simple-types" - ) - types.add(entry.type) - - if types == {FieldType.INT.value, FieldType.FLOAT.value}: - return FieldType.FLOAT.value - - warn_once( - f"Column {column} contains more than one simple data type. Sorting result might be inaccurate.", - exception=NeptuneWarning, - ) - return FieldType.STRING.value diff --git a/src/neptune/internal/backends/neptune_backend.py b/src/neptune/internal/backends/neptune_backend.py deleted file mode 100644 index 578a003ca..000000000 --- a/src/neptune/internal/backends/neptune_backend.py +++ /dev/null @@ -1,361 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NeptuneBackend"] - -import abc -from typing import ( - Any, - Generator, - List, - Optional, - Tuple, - Union, -) - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - Field, - FieldDefinition, - FieldType, - FileEntry, - FileField, - FloatField, - FloatSeriesField, - FloatSeriesValues, - ImageSeriesValues, - IntField, - LeaderboardEntry, - NextPage, - QueryFieldDefinitionsResult, - QueryFieldsResult, - StringField, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.core.components.operation_storage import OperationStorage -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.backends.api_model import ( - ApiExperiment, - Project, - Workspace, -) -from neptune.internal.backends.nql import NQLQuery -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneException -from neptune.internal.id_formats import ( - QualifiedName, - UniqueId, -) -from neptune.internal.operation import Operation -from neptune.internal.utils.git import GitInfo -from neptune.internal.websockets.websockets_factory import WebsocketsFactory -from neptune.typing import ProgressBarType - - -class NeptuneBackend: - def close(self) -> None: - """No need for closing implementation""" - - @abc.abstractmethod - def get_display_address(self) -> str: - pass - - def verify_feature_available(self, _: str) -> None: - """ - this method makes sense only for backends interacting with server; - it makes sure that a feature is supported in the backend version client interacts with - """ - - def websockets_factory(self, project_id: str, run_id: str) -> Optional[WebsocketsFactory]: - return None - - @abc.abstractmethod - def get_project(self, project_id: QualifiedName) -> Project: - pass - - @abc.abstractmethod - def get_available_projects( - self, workspace_id: 
Optional[str] = None, search_term: Optional[str] = None - ) -> List[Project]: - pass - - @abc.abstractmethod - def get_available_workspaces(self) -> List[Workspace]: - pass - - @abc.abstractmethod - def create_run( - self, - project_id: UniqueId, - git_info: Optional[GitInfo] = None, - custom_run_id: Optional[str] = None, - notebook_id: Optional[str] = None, - checkpoint_id: Optional[str] = None, - ) -> ApiExperiment: - pass - - @abc.abstractmethod - def create_model( - self, - project_id: UniqueId, - key: str, - ) -> ApiExperiment: - pass - - @abc.abstractmethod - def create_model_version( - self, - project_id: UniqueId, - model_id: UniqueId, - ) -> ApiExperiment: - pass - - @abc.abstractmethod - def get_metadata_container( - self, - container_id: Union[UniqueId, QualifiedName], - expected_container_type: Optional[ContainerType], - ) -> ApiExperiment: - pass - - @abc.abstractmethod - def create_checkpoint(self, notebook_id: str, jupyter_path: str) -> Optional[str]: - pass - - def ping(self, container_id: str, container_type: ContainerType): - """Do nothing by default""" - - @abc.abstractmethod - def execute_operations( - self, - container_id: UniqueId, - container_type: ContainerType, - operations: List[Operation], - operation_storage: OperationStorage, - ) -> Tuple[int, List[NeptuneException]]: - pass - - @abc.abstractmethod - def get_attributes(self, container_id: str, container_type: ContainerType) -> List[FieldDefinition]: - pass - - @abc.abstractmethod - def download_file( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - pass - - @abc.abstractmethod - def download_file_set( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - pass - - @abc.abstractmethod - def get_float_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FloatField: - pass - - @abc.abstractmethod - def get_int_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> IntField: - pass - - @abc.abstractmethod - def get_bool_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> BoolField: - pass - - @abc.abstractmethod - def get_file_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FileField: - pass - - @abc.abstractmethod - def get_string_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> StringField: - pass - - @abc.abstractmethod - def get_datetime_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> DateTimeField: - pass - - @abc.abstractmethod - def get_artifact_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> ArtifactField: - pass - - @abc.abstractmethod - def list_artifact_files(self, project_id: str, artifact_hash: str) -> List[ArtifactFileData]: - pass - - @abc.abstractmethod - def get_float_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> FloatSeriesField: - pass - - @abc.abstractmethod - def get_string_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSeriesField: - pass - - @abc.abstractmethod - def get_string_set_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> 
StringSetField: - pass - - @abc.abstractmethod - def download_file_series_by_index( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - index: int, - destination: str, - progress_bar: Optional[ProgressBarType], - ): - pass - - @abc.abstractmethod - def get_image_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - offset: int, - limit: int, - ) -> ImageSeriesValues: - pass - - @abc.abstractmethod - def get_string_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - ) -> StringSeriesValues: ... - - @abc.abstractmethod - def get_float_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - use_proto: Optional[bool] = None, - include_inherited: bool = True, - ) -> FloatSeriesValues: ... - - @abc.abstractmethod - def get_run_url(self, run_id: str, workspace: str, project_name: str, sys_id: str) -> str: - pass - - @abc.abstractmethod - def get_project_url(self, project_id: str, workspace: str, project_name: str) -> str: - pass - - @abc.abstractmethod - def get_model_url(self, model_id: str, workspace: str, project_name: str, sys_id: str) -> str: - pass - - @abc.abstractmethod - def get_model_version_url( - self, - model_version_id: str, - model_id: str, - workspace: str, - project_name: str, - sys_id: str, - ) -> str: - pass - - # WARN: Used in Neptune Fetcher - @abc.abstractmethod - def get_fields_definitions( - self, - container_id: str, - container_type: ContainerType, - use_proto: Optional[bool] = None, - ) -> List[FieldDefinition]: ... - - # WARN: Used in Neptune Fetcher - @abc.abstractmethod - def get_fields_with_paths_filter( - self, container_id: str, container_type: ContainerType, paths: List[str], use_proto: Optional[bool] = None - ) -> List[Field]: ... - - @abc.abstractmethod - def fetch_atom_attribute_values( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> List[Tuple[str, FieldType, Any]]: - pass - - @abc.abstractmethod - def search_leaderboard_entries( - self, - project_id: UniqueId, - types: Optional[List[ContainerType]] = None, - query: Optional[NQLQuery] = None, - columns: Optional[List[str]] = None, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - use_proto: Optional[bool] = None, - ) -> Generator[LeaderboardEntry, None, None]: - pass - - @abc.abstractmethod - def list_fileset_files(self, attribute: List[str], container_id: str, path: str) -> List[FileEntry]: - pass - - @abc.abstractmethod - def query_fields_definitions_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - experiment_ids_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - ) -> QueryFieldDefinitionsResult: ... - - @abc.abstractmethod - def query_fields_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - field_names_filter: Optional[List[str]] = None, - experiment_ids_filter: Optional[List[str]] = None, - experiment_names_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - use_proto: Optional[bool] = None, - ) -> QueryFieldsResult: ... 
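A note on the contract deleted above: `NeptuneBackend` marks methods with `@abc.abstractmethod`, but the class itself does not inherit from `abc.ABC`, so Python never blocks instantiation of an incomplete subclass; the interface is enforced by convention and by the concrete backends that follow. A minimal, self-contained sketch (stand-in class names, not part of the codebase) of what the missing metaclass changes:

    import abc


    class WithoutMeta:  # like NeptuneBackend: @abc.abstractmethod without ABCMeta
        @abc.abstractmethod
        def get_display_address(self) -> str:
            ...


    class WithMeta(abc.ABC):  # abc.ABC supplies ABCMeta, enabling enforcement
        @abc.abstractmethod
        def get_display_address(self) -> str:
            ...


    WithoutMeta()  # instantiates fine; the decorator alone is inert
    try:
        WithMeta()
    except TypeError as err:
        print(err)  # can't instantiate abstract class WithMeta ...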
diff --git a/src/neptune/internal/backends/neptune_backend_mock.py b/src/neptune/internal/backends/neptune_backend_mock.py deleted file mode 100644 index 9943532ff..000000000 --- a/src/neptune/internal/backends/neptune_backend_mock.py +++ /dev/null @@ -1,836 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["NeptuneBackendMock"] - -import os -import uuid -from collections import defaultdict -from datetime import datetime -from shutil import copyfile -from typing import ( - Any, - Dict, - Generator, - Iterable, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) -from zipfile import ZipFile - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - Field, - FieldDefinition, - FieldType, - FileEntry, - FileField, - FloatField, - FloatPointValue, - FloatSeriesField, - FloatSeriesValues, - ImageSeriesValues, - IntField, - LeaderboardEntry, - NextPage, - QueryFieldDefinitionsResult, - QueryFieldsResult, - StringField, - StringPointValue, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.core.components.operation_storage import OperationStorage -from neptune.exceptions import ( - ContainerUUIDNotFound, - MetadataInconsistency, - ModelVersionNotFound, - ProjectNotFound, - RunNotFound, -) -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.backends.api_model import ( - ApiExperiment, - Project, - Workspace, -) -from neptune.internal.backends.hosted_file_operations import get_unique_upload_entries -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.backends.nql import NQLQuery -from neptune.internal.container_structure import ContainerStructure -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import ( - InternalClientError, - NeptuneException, -) -from neptune.internal.id_formats import ( - QualifiedName, - SysId, - UniqueId, -) -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearArtifact, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) -from neptune.internal.operation_visitor import OperationVisitor -from neptune.internal.types.file_types import FileType -from neptune.internal.utils import base64_decode -from neptune.internal.utils.generic_attribute_mapper import NoValue -from neptune.internal.utils.git import GitInfo -from neptune.internal.utils.paths import path_to_str -from neptune.types import ( - Boolean, - Integer, -) -from neptune.types.atoms import GitRef -from neptune.types.atoms.artifact import Artifact -from neptune.types.atoms.datetime import Datetime -from neptune.types.atoms.file import File -from 
neptune.types.atoms.float import Float -from neptune.types.atoms.string import String -from neptune.types.file_set import FileSet -from neptune.types.namespace import Namespace -from neptune.types.series.file_series import FileSeries -from neptune.types.series.float_series import FloatSeries -from neptune.types.series.string_series import StringSeries -from neptune.types.sets.string_set import StringSet -from neptune.types.value import Value -from neptune.types.value_visitor import ValueVisitor -from neptune.typing import ProgressBarType - -Val = TypeVar("Val", bound=Value) - - -class NeptuneBackendMock(NeptuneBackend): - WORKSPACE_NAME = "mock-workspace" - PROJECT_NAME = "project-placeholder" - PROJECT_KEY = SysId("OFFLINE") - MODEL_SYS_ID = SysId("OFFLINE-MOD") - - def __init__(self, credentials=None, proxies=None): - self._project_id: UniqueId = UniqueId(str(uuid.uuid4())) - self._containers: Dict[(UniqueId, ContainerType), ContainerStructure[Value, dict]] = dict() - self._next_run = 1 # counter for runs - self._next_model_version = defaultdict(lambda: 1) # counter for model versions - self._artifacts: Dict[Tuple[str, str], List[ArtifactFileData]] = dict() - self._attribute_type_converter_value_visitor = self.AttributeTypeConverterValueVisitor() - self._create_container(self._project_id, ContainerType.PROJECT, self.PROJECT_KEY) - - def get_display_address(self) -> str: - return "OFFLINE" - - def get_available_projects( - self, workspace_id: Optional[str] = None, search_term: Optional[str] = None - ) -> List[Project]: - return [ - Project( - id=UniqueId(str(uuid.uuid4())), - name=self.PROJECT_NAME, - workspace=self.WORKSPACE_NAME, - sys_id=self.PROJECT_KEY, - ) - ] - - def get_available_workspaces(self) -> List[Workspace]: - return [Workspace(id=UniqueId(str(uuid.uuid4())), name=self.WORKSPACE_NAME)] - - def _create_container(self, container_id: UniqueId, container_type: ContainerType, sys_id: SysId): - container = self._containers.setdefault((container_id, container_type), ContainerStructure[Value, dict]()) - container.set(["sys", "id"], String(str(sys_id))) - container.set(["sys", "state"], String("Active")) - container.set(["sys", "owner"], String("offline_user")) - container.set(["sys", "size"], Float(0)) - container.set(["sys", "tags"], StringSet(set())) - container.set(["sys", "creation_time"], Datetime(datetime.now())) - container.set(["sys", "modification_time"], Datetime(datetime.now())) - container.set(["sys", "failed"], Boolean(False)) - if container_type == ContainerType.MODEL_VERSION: - container.set(["sys", "model_id"], String(str(self.MODEL_SYS_ID))) - container.set(["sys", "stage"], String("none")) - return container - - def _get_container(self, container_id: UniqueId, container_type: ContainerType): - key = (container_id, container_type) - if key not in self._containers: - raise ContainerUUIDNotFound(container_id, container_type) - container = self._containers[(container_id, container_type)] - return container - - def create_run( - self, - project_id: UniqueId, - git_info: Optional[GitInfo] = None, - custom_run_id: Optional[str] = None, - notebook_id: Optional[str] = None, - checkpoint_id: Optional[str] = None, - ) -> ApiExperiment: - sys_id = SysId(f"{self.PROJECT_KEY}-{self._next_run}") - self._next_run += 1 - new_run_id = UniqueId(str(uuid.uuid4())) - self._create_container(new_run_id, ContainerType.RUN, sys_id=sys_id) - return ApiExperiment( - id=new_run_id, - type=ContainerType.RUN, - sys_id=sys_id, - workspace=self.WORKSPACE_NAME, - project_name=self.PROJECT_NAME, 
- trashed=False, - ) - - def create_model(self, project_id: str, key: str) -> ApiExperiment: - sys_id = SysId(f"{self.PROJECT_KEY}-{key}") - new_run_id = UniqueId(str(uuid.uuid4())) - self._create_container(new_run_id, ContainerType.MODEL, sys_id=sys_id) - return ApiExperiment( - id=new_run_id, - type=ContainerType.MODEL, - sys_id=sys_id, - workspace=self.WORKSPACE_NAME, - project_name=self.PROJECT_NAME, - trashed=False, - ) - - def create_model_version(self, project_id: str, model_id: UniqueId) -> ApiExperiment: - try: - model_key = self._get_container(container_id=model_id, container_type=ContainerType.MODEL).get("sys/id") - except ContainerUUIDNotFound: - model_key = "MOD" - - sys_id = SysId(f"{self.PROJECT_KEY}-{model_key}-{self._next_model_version[model_id]}") - self._next_model_version[model_id] += 1 - new_run_id = UniqueId(str(uuid.uuid4())) - self._create_container(new_run_id, ContainerType.MODEL_VERSION, sys_id=sys_id) - return ApiExperiment( - id=new_run_id, - type=ContainerType.MODEL, - sys_id=sys_id, - workspace=self.WORKSPACE_NAME, - project_name=self.PROJECT_NAME, - trashed=False, - ) - - def create_checkpoint(self, notebook_id: str, jupyter_path: str) -> Optional[str]: - return None - - def get_project(self, project_id: QualifiedName) -> Project: - return Project( - id=self._project_id, - name=self.PROJECT_NAME, - workspace=self.WORKSPACE_NAME, - sys_id=self.PROJECT_KEY, - ) - - def get_metadata_container( - self, - container_id: Union[UniqueId, QualifiedName], - expected_container_type: Optional[ContainerType], - ) -> ApiExperiment: - if "/" not in container_id: - raise ValueError("Backend mock expect container_id as QualifiedName only") - - if expected_container_type == ContainerType.RUN: - raise RunNotFound(container_id) - elif expected_container_type == ContainerType.MODEL: - return ApiExperiment( - id=UniqueId(str(uuid.uuid4())), - type=ContainerType.MODEL, - sys_id=SysId(container_id.rsplit("/", 1)[-1]), - workspace=self.WORKSPACE_NAME, - project_name=self.PROJECT_NAME, - ) - elif expected_container_type == ContainerType.MODEL_VERSION: - raise ModelVersionNotFound(container_id) - else: - raise ProjectNotFound(container_id) - - def execute_operations( - self, - container_id: UniqueId, - container_type: ContainerType, - operations: List[Operation], - operation_storage: OperationStorage, - ) -> Tuple[int, List[NeptuneException]]: - result = [] - for op in operations: - try: - self._execute_operation(container_id, container_type, op, operation_storage) - except NeptuneException as e: - result.append(e) - return len(operations), result - - def _execute_operation( - self, container_id: UniqueId, container_type: ContainerType, op: Operation, operation_storage: OperationStorage - ) -> None: - run = self._get_container(container_id, container_type) - val = run.get(op.path) - if val is not None and not isinstance(val, Value): - if isinstance(val, dict): - raise MetadataInconsistency("{} is a namespace, not an attribute".format(op.path)) - else: - raise InternalClientError("{} is a {}".format(op.path, type(val))) - visitor = NeptuneBackendMock.NewValueOpVisitor(self, op.path, val, operation_storage) - new_val = visitor.visit(op) - if new_val is not None: - run.set(op.path, new_val) - else: - run.pop(op.path) - - def get_attributes(self, container_id: str, container_type: ContainerType) -> List[FieldDefinition]: - run = self._get_container(container_id, container_type) - return list(self._generate_attributes(None, run.get_structure())) - - def _generate_attributes(self, base_path: 
Optional[str], values: dict): - for key, value_or_dict in values.items(): - new_path = base_path + "/" + key if base_path is not None else key - if isinstance(value_or_dict, dict): - yield from self._generate_attributes(new_path, value_or_dict) - else: - yield FieldDefinition( - new_path, - value_or_dict.accept(self._attribute_type_converter_value_visitor), - ) - - def download_file( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - run = self._get_container(container_id, container_type) - value: File = run.get(path) - target_path = os.path.abspath(destination or (path[-1] + ("." + value.extension if value.extension else ""))) - if value.file_type is FileType.IN_MEMORY: - with open(target_path, "wb") as target_file: - target_file.write(value.content) - elif value.file_type is FileType.LOCAL_FILE: - if value.path != target_path: - copyfile(value.path, target_path) - else: - raise ValueError(f"Unexpected FileType: {value.file_type}") - - def download_file_set( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - destination: Optional[str] = None, - progress_bar: Optional[ProgressBarType] = None, - ): - run = self._get_container(container_id, container_type) - source_file_set_value: FileSet = run.get(path) - - if destination is None: - target_file = path[-1] + ".zip" - elif os.path.isdir(destination): - target_file = os.path.join(destination, path[-1] + ".zip") - else: - target_file = destination - - upload_entries = get_unique_upload_entries(source_file_set_value.file_globs) - - with ZipFile(target_file, "w") as zipObj: - for upload_entry in upload_entries: - zipObj.write(upload_entry.source, upload_entry.target_path) - - def get_float_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FloatField: - val = self._get_attribute(container_id, container_type, path, Float) - return FloatField(path=path_to_str(path), value=val.value) - - def get_int_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> IntField: - val = self._get_attribute(container_id, container_type, path, Integer) - return IntField(path=path_to_str(path), value=val.value) - - def get_bool_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> BoolField: - val = self._get_attribute(container_id, container_type, path, Boolean) - return BoolField(path=path_to_str(path), value=val.value) - - def get_file_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FileField: - val = self._get_attribute(container_id, container_type, path, File) - return FileField( - path=path_to_str(path), - name=os.path.basename(val.path) if val.file_type is FileType.LOCAL_FILE else "", - ext=val.extension or "", - size=0, - ) - - def get_string_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> StringField: - val = self._get_attribute(container_id, container_type, path, String) - return StringField(path=path_to_str(path), value=val.value) - - def get_datetime_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> DateTimeField: - val = self._get_attribute(container_id, container_type, path, Datetime) - return DateTimeField(path=path_to_str(path), value=val.value) - - def get_artifact_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> ArtifactField: - val = 
self._get_attribute(container_id, container_type, path, Artifact) - return ArtifactField(path=path_to_str(path), hash=val.hash) - - def list_artifact_files(self, project_id: str, artifact_hash: str) -> List[ArtifactFileData]: - return self._artifacts[(project_id, artifact_hash)] - - def get_float_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> FloatSeriesField: - val = self._get_attribute(container_id, container_type, path, FloatSeries) - return FloatSeriesField(path=path_to_str(path), last=val.values[-1] if val.values else None) - - def get_string_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSeriesField: - val = self._get_attribute(container_id, container_type, path, StringSeries) - return StringSeriesField(path=path_to_str(path), last=val.values[-1] if val.values else None) - - def get_string_set_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSetField: - val = self._get_attribute(container_id, container_type, path, StringSet) - return StringSetField(path=path_to_str(path), values=set(val.values)) - - def _get_attribute( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - expected_type: Type[Val], - ) -> Val: - run = self._get_container(container_id, container_type) - value: Optional[Value] = run.get(path) - str_path = path_to_str(path) - if value is None: - raise MetadataInconsistency("Attribute {} not found".format(str_path)) - if isinstance(value, expected_type): - return value - raise MetadataInconsistency("Attribute {} is not {}".format(str_path, expected_type.__name__)) - - def get_string_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - ) -> StringSeriesValues: - val = self._get_attribute(container_id, container_type, path, StringSeries) - return StringSeriesValues( - len(val.values), - [StringPointValue(timestamp=datetime.now(), step=idx, value=v) for idx, v in enumerate(val.values)], - ) - - def get_float_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - use_proto: Optional[bool] = None, - include_inherited: bool = True, - ) -> FloatSeriesValues: - val = self._get_attribute(container_id, container_type, path, FloatSeries) - return FloatSeriesValues( - len(val.values), - [FloatPointValue(timestamp=datetime.now(), step=idx, value=v) for idx, v in enumerate(val.values)], - ) - - def get_image_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - offset: int, - limit: int, - ) -> ImageSeriesValues: - return ImageSeriesValues(0) - - def download_file_series_by_index( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - index: int, - destination: str, - progress_bar: Optional[ProgressBarType], - ): - """Not relevant for this backend""" - - def get_run_url(self, run_id: str, workspace: str, project_name: str, sys_id: str) -> str: - return f"offline/{run_id}" - - def get_project_url(self, project_id: str, workspace: str, project_name: str) -> str: - return f"offline/{project_id}" - - def get_model_url(self, model_id: str, workspace: str, project_name: str, sys_id: str) -> str: - return f"offline/{model_id}" - - def get_model_version_url( - self, - model_version_id: str, - model_id: str, - workspace: str, - project_name: str, - sys_id: str, - )
-> str: - return f"offline/{model_version_id}" - - def get_fields_definitions( - self, - container_id: str, - container_type: ContainerType, - use_proto: Optional[bool] = None, - ) -> List[FieldDefinition]: - return [] - - def _get_attribute_values(self, value_dict, path_prefix: List[str]): - assert isinstance(value_dict, dict) - for k, value in value_dict.items(): - if isinstance(value, dict): - yield from self._get_attribute_values(value, path_prefix + [k]) - else: - attr_type = value.accept(self._attribute_type_converter_value_visitor).value - attr_path = "/".join(path_prefix + [k]) - if hasattr(value, "value"): - yield attr_path, attr_type, value.value - else: - yield attr_path, attr_type, NoValue - - def fetch_atom_attribute_values( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> List[Tuple[str, FieldType, Any]]: - run = self._get_container(container_id, container_type) - values = self._get_attribute_values(run.get(path), path) - namespace_prefix = path_to_str(path) - if namespace_prefix: - # don't want to catch "ns/attribute/other" while looking for "ns/attr" - namespace_prefix += "/" - return [ - (full_path, attr_type, attr_value) - for (full_path, attr_type, attr_value) in values - if full_path.startswith(namespace_prefix) - ] - - def search_leaderboard_entries( - self, - project_id: UniqueId, - types: Optional[Iterable[ContainerType]] = None, - query: Optional[NQLQuery] = None, - columns: Optional[Iterable[str]] = None, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - use_proto: Optional[bool] = None, - ) -> Generator[LeaderboardEntry, None, None]: - """Not relevant for the mock backend""" - - class AttributeTypeConverterValueVisitor(ValueVisitor[FieldType]): - def visit_float(self, _: Float) -> FieldType: - return FieldType.FLOAT - - def visit_integer(self, _: Integer) -> FieldType: - return FieldType.INT - - def visit_boolean(self, _: Boolean) -> FieldType: - return FieldType.BOOL - - def visit_string(self, _: String) -> FieldType: - return FieldType.STRING - - def visit_datetime(self, _: Datetime) -> FieldType: - return FieldType.DATETIME - - def visit_file(self, _: File) -> FieldType: - return FieldType.FILE - - def visit_file_set(self, _: FileSet) -> FieldType: - return FieldType.FILE_SET - - def visit_float_series(self, _: FloatSeries) -> FieldType: - return FieldType.FLOAT_SERIES - - def visit_string_series(self, _: StringSeries) -> FieldType: - return FieldType.STRING_SERIES - - def visit_image_series(self, _: FileSeries) -> FieldType: - return FieldType.IMAGE_SERIES - - def visit_string_set(self, _: StringSet) -> FieldType: - return FieldType.STRING_SET - - def visit_git_ref(self, _: GitRef) -> FieldType: - return FieldType.GIT_REF - - def visit_artifact(self, _: Artifact) -> FieldType: - return FieldType.ARTIFACT - - def visit_namespace(self, _: Namespace) -> FieldType: - raise NotImplementedError - - def copy_value(self, source_type: Type[FieldDefinition], source_path: List[str]) -> FieldType: - raise NotImplementedError - - class NewValueOpVisitor(OperationVisitor[Optional[Value]]): - def __init__( - self, backend, path: List[str], current_value: Optional[Value], operation_storage: OperationStorage - ): - self._backend = backend - self._path = path - self._current_value = current_value - self._artifact_hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" - self._operation_storage = operation_storage - - def
visit_assign_float(self, op: AssignFloat) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Float): - raise self._create_type_error("assign", Float.__name__) - return Float(op.value) - - def visit_assign_int(self, op: AssignInt) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Integer): - raise self._create_type_error("assign", Integer.__name__) - return Integer(op.value) - - def visit_assign_bool(self, op: AssignBool) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Boolean): - raise self._create_type_error("assign", Boolean.__name__) - return Boolean(op.value) - - def visit_assign_string(self, op: AssignString) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, String): - raise self._create_type_error("assign", String.__name__) - return String(op.value) - - def visit_assign_datetime(self, op: AssignDatetime) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Datetime): - raise self._create_type_error("assign", Datetime.__name__) - return Datetime(op.value) - - def visit_assign_artifact(self, op: AssignArtifact) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Artifact): - raise self._create_type_error("assign", Artifact.__name__) - return Artifact(op.hash) - - def visit_track_files_to_artifact(self, _: TrackFilesToArtifact) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, Artifact): - raise self._create_type_error("save", Artifact.__name__) - return Artifact(self._artifact_hash) - - def visit_clear_artifact(self, _: ClearArtifact) -> Optional[Value]: - if self._current_value is None: - return Artifact() - if not isinstance(self._current_value, Artifact): - raise self._create_type_error("clear", Artifact.__name__) - return Artifact() - - def visit_upload_file(self, op: UploadFile) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, File): - raise self._create_type_error("save", File.__name__) - return File.from_path(path=op.get_absolute_path(self._operation_storage), extension=op.ext) - - def visit_upload_file_content(self, op: UploadFileContent) -> Optional[Value]: - if self._current_value is not None and not isinstance(self._current_value, File): - raise self._create_type_error("upload_files", File.__name__) - return File.from_content(content=base64_decode(op.file_content), extension=op.ext) - - def visit_upload_file_set(self, op: UploadFileSet) -> Optional[Value]: - if self._current_value is None or op.reset: - return FileSet(op.file_globs) - if not isinstance(self._current_value, FileSet): - raise self._create_type_error("save", FileSet.__name__) - return FileSet(self._current_value.file_globs + op.file_globs) - - def visit_log_floats(self, op: LogFloats) -> Optional[Value]: - raw_values = [x.value for x in op.values] - if self._current_value is None: - return FloatSeries(raw_values) - if not isinstance(self._current_value, FloatSeries): - raise self._create_type_error("log", FloatSeries.__name__) - return FloatSeries( - self._current_value.values + raw_values, - min=self._current_value.min, - max=self._current_value.max, - unit=self._current_value.unit, - ) - - def visit_log_strings(self, op: LogStrings) -> Optional[Value]: - raw_values = [x.value for x in op.values] - if self._current_value is None: - return 
StringSeries(raw_values) - if not isinstance(self._current_value, StringSeries): - raise self._create_type_error("log", StringSeries.__name__) - return StringSeries(self._current_value.values + raw_values) - - def visit_log_images(self, op: LogImages) -> Optional[Value]: - raw_values = [File.from_content(base64_decode(x.value.data)) for x in op.values] - if self._current_value is None: - return FileSeries(raw_values) - if not isinstance(self._current_value, FileSeries): - raise self._create_type_error("log", FileSeries.__name__) - return FileSeries(self._current_value.values + raw_values) - - def visit_clear_float_log(self, op: ClearFloatLog) -> Optional[Value]: - if self._current_value is None: - return FloatSeries([]) - if not isinstance(self._current_value, FloatSeries): - raise self._create_type_error("clear", FloatSeries.__name__) - return FloatSeries( - [], - min=self._current_value.min, - max=self._current_value.max, - unit=self._current_value.unit, - ) - - def visit_clear_string_log(self, op: ClearStringLog) -> Optional[Value]: - if self._current_value is None: - return StringSeries([]) - if not isinstance(self._current_value, StringSeries): - raise self._create_type_error("clear", StringSeries.__name__) - return StringSeries([]) - - def visit_clear_image_log(self, op: ClearImageLog) -> Optional[Value]: - if self._current_value is None: - return FileSeries([]) - if not isinstance(self._current_value, FileSeries): - raise self._create_type_error("clear", FileSeries.__name__) - return FileSeries([]) - - def visit_config_float_series(self, op: ConfigFloatSeries) -> Optional[Value]: - if self._current_value is None: - return FloatSeries([], min=op.min, max=op.max, unit=op.unit) - if not isinstance(self._current_value, FloatSeries): - raise self._create_type_error("log", FloatSeries.__name__) - return FloatSeries(self._current_value.values, min=op.min, max=op.max, unit=op.unit) - - def visit_add_strings(self, op: AddStrings) -> Optional[Value]: - if self._current_value is None: - return StringSet(op.values) - if not isinstance(self._current_value, StringSet): - raise self._create_type_error("add", StringSet.__name__) - return StringSet(self._current_value.values.union(op.values)) - - def visit_remove_strings(self, op: RemoveStrings) -> Optional[Value]: - if self._current_value is None: - return StringSet(set()) - if not isinstance(self._current_value, StringSet): - raise self._create_type_error("remove", StringSet.__name__) - return StringSet(self._current_value.values.difference(op.values)) - - def visit_clear_string_set(self, op: ClearStringSet) -> Optional[Value]: - if self._current_value is None: - return StringSet(set()) - if not isinstance(self._current_value, StringSet): - raise self._create_type_error("clear", StringSet.__name__) - return StringSet(set()) - - def visit_delete_files(self, op: DeleteFiles) -> Optional[Value]: - if self._current_value is None: - return FileSet([]) - if not isinstance(self._current_value, FileSet): - raise self._create_type_error("delete_files", FileSet.__name__) - # It is not important to support deleting properly in debug mode, let's just ignore this operation - return self._current_value - - def visit_delete_attribute(self, op: DeleteAttribute) -> Optional[Value]: - if self._current_value is None: - raise MetadataInconsistency( - "Cannot perform delete operation on {}. 
Attribute is undefined.".format(self._path) - ) - return None - - def visit_copy_attribute(self, op: CopyAttribute) -> Optional[Value]: - return op.resolve(self._backend).accept(self) - - def _create_type_error(self, op_name, expected): - return MetadataInconsistency( - "Cannot perform {} operation on {}. Expected {}, {} found.".format( - op_name, self._path, expected, type(self._current_value) - ) - ) - - def list_fileset_files(self, attribute: List[str], container_id: str, path: str) -> List[FileEntry]: - return [ - FileEntry( - name="mock_name", - size=100, - mtime=datetime.now(), - file_type="file", - ) - ] - - def get_fields_with_paths_filter( - self, container_id: str, container_type: ContainerType, paths: List[str], use_proto: Optional[bool] = None - ) -> List[Field]: - return [] - - def query_fields_definitions_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - experiment_ids_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - ) -> QueryFieldDefinitionsResult: - return QueryFieldDefinitionsResult( - entries=[], - next_page=NextPage(next_page_token=None, limit=0), - ) - - def query_fields_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - field_names_filter: Optional[List[str]] = None, - experiment_ids_filter: Optional[List[str]] = None, - experiment_names_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - use_proto: Optional[bool] = None, - ) -> QueryFieldsResult: - return QueryFieldsResult( - entries=[], - next_page=NextPage(next_page_token=None, limit=0), - ) diff --git a/src/neptune/internal/backends/nql.py b/src/neptune/internal/backends/nql.py deleted file mode 100644 index 5e5d444f3..000000000 --- a/src/neptune/internal/backends/nql.py +++ /dev/null @@ -1,122 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
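The mock backend above applies writes through double dispatch: `_execute_operation` builds a `NewValueOpVisitor` around the attribute's current value, and each `Operation` subclass routes itself to the matching `visit_*` method, which returns the new value (or `None` to delete the attribute). A self-contained sketch of that pattern, with hypothetical `Assign`/`Log` operations standing in for the real ones:

    from dataclasses import dataclass


    class Op:
        def accept(self, visitor):
            raise NotImplementedError


    @dataclass
    class Assign(Op):
        value: float

        def accept(self, visitor):
            return visitor.visit_assign(self)


    @dataclass
    class Log(Op):
        values: list

        def accept(self, visitor):
            return visitor.visit_log(self)


    class NewValueVisitor:
        """Computes an attribute's new value from its current one (None = unset)."""

        def __init__(self, current):
            self.current = current

        def visit_assign(self, op: Assign):
            return op.value  # assign replaces the value outright

        def visit_log(self, op: Log):
            return (self.current or []) + op.values  # log appends to the series


    print(Log([1.0, 2.0]).accept(NewValueVisitor([0.5])))  # [0.5, 1.0, 2.0]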
-# - -from __future__ import annotations - -__all__ = [ - "NQLQuery", - "NQLEmptyQuery", - "NQLAggregator", - "NQLQueryAggregate", - "NQLAttributeOperator", - "NQLAttributeType", - "NQLQueryAttribute", - "RawNQLQuery", -] - -import typing -from dataclasses import dataclass -from enum import Enum -from typing import Iterable - - -@dataclass -class NQLQuery: - def eval(self) -> NQLQuery: - return self - - -@dataclass -class NQLEmptyQuery(NQLQuery): - def __str__(self) -> str: - return "" - - -class NQLAggregator(str, Enum): - AND = "AND" - OR = "OR" - - -@dataclass -class NQLQueryAggregate(NQLQuery): - items: Iterable[NQLQuery] - aggregator: NQLAggregator - - def eval(self) -> NQLQuery: - self.items = list(filter(lambda nql: not isinstance(nql, NQLEmptyQuery), (item.eval() for item in self.items))) - - if len(self.items) == 0: - return NQLEmptyQuery() - elif len(self.items) == 1: - return self.items[0] - return self - - def __str__(self) -> str: - evaluated = self.eval() - if isinstance(evaluated, NQLQueryAggregate): - return "(" + f" {self.aggregator.value} ".join(map(str, self.items)) + ")" - return str(evaluated) - - -class NQLAttributeOperator(str, Enum): - EQUALS = "=" - CONTAINS = "CONTAINS" - GREATER_THAN = ">" - LESS_THAN = "<" - MATCHES = "MATCHES" - NOT_MATCHES = "NOT MATCHES" - - -class NQLAttributeType(str, Enum): - STRING = "string" - STRING_SET = "stringSet" - EXPERIMENT_STATE = "experimentState" - BOOLEAN = "bool" - DATETIME = "datetime" - INTEGER = "integer" - FLOAT = "float" - - -@dataclass -class NQLQueryAttribute(NQLQuery): - name: str - type: NQLAttributeType - operator: NQLAttributeOperator - value: typing.Union[str, bool] - - def __str__(self) -> str: - if isinstance(self.value, bool): - value = str(self.value).lower() - else: - value = f'"{self.value}"' - - return f"(`{self.name}`:{self.type.value} {self.operator.value} {value})" - - -@dataclass -class RawNQLQuery(NQLQuery): - query: str - - def eval(self) -> NQLQuery: - if self.query == "": - return NQLEmptyQuery() - return self - - def __str__(self) -> str: - evaluated = self.eval() - if isinstance(evaluated, RawNQLQuery): - return self.query - return str(evaluated) diff --git a/src/neptune/internal/backends/offline_neptune_backend.py b/src/neptune/internal/backends/offline_neptune_backend.py deleted file mode 100644 index 7cd3a980d..000000000 --- a/src/neptune/internal/backends/offline_neptune_backend.py +++ /dev/null @@ -1,200 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
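The NQL builders deleted above compose queries declaratively: `eval()` prunes empty sub-queries, a single surviving term collapses to itself, and `__str__` renders parenthesized terms joined by the aggregator. A usage sketch, runnable against the module as it existed before this patch:

    from neptune.internal.backends.nql import (
        NQLAggregator,
        NQLAttributeOperator,
        NQLAttributeType,
        NQLEmptyQuery,
        NQLQueryAggregate,
        NQLQueryAttribute,
    )

    owner = NQLQueryAttribute(
        name="sys/owner",
        type=NQLAttributeType.STRING,
        operator=NQLAttributeOperator.EQUALS,
        value="alice",
    )

    # The empty term is dropped by eval(), so the AND collapses to one term:
    print(NQLQueryAggregate(items=[owner, NQLEmptyQuery()], aggregator=NQLAggregator.AND))
    # (`sys/owner`:string = "alice")

    trashed = NQLQueryAttribute(
        name="sys/trashed",
        type=NQLAttributeType.BOOLEAN,
        operator=NQLAttributeOperator.EQUALS,
        value=False,
    )

    # Two real terms render with the aggregator between them:
    print(NQLQueryAggregate(items=[owner, trashed], aggregator=NQLAggregator.AND))
    # ((`sys/owner`:string = "alice") AND (`sys/trashed`:bool = false))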
-# -__all__ = ["OfflineNeptuneBackend"] - -from typing import ( - Generator, - Iterable, - List, - Optional, -) - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - Field, - FieldDefinition, - FileEntry, - FileField, - FloatField, - FloatSeriesField, - FloatSeriesValues, - ImageSeriesValues, - IntField, - LeaderboardEntry, - NextPage, - QueryFieldDefinitionsResult, - QueryFieldsResult, - StringField, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.exceptions import NeptuneOfflineModeFetchException -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.internal.backends.nql import NQLQuery -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import ( - QualifiedName, - UniqueId, -) -from neptune.typing import ProgressBarType - - -class OfflineNeptuneBackend(NeptuneBackendMock): - WORKSPACE_NAME = "offline" - - def get_attributes(self, container_id: str, container_type: ContainerType) -> List[FieldDefinition]: - raise NeptuneOfflineModeFetchException - - def get_float_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FloatField: - raise NeptuneOfflineModeFetchException - - def get_int_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> IntField: - raise NeptuneOfflineModeFetchException - - def get_bool_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> BoolField: - raise NeptuneOfflineModeFetchException - - def get_file_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FileField: - raise NeptuneOfflineModeFetchException - - def get_string_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> StringField: - raise NeptuneOfflineModeFetchException - - def get_datetime_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> DateTimeField: - raise NeptuneOfflineModeFetchException - - def get_artifact_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> ArtifactField: - raise NeptuneOfflineModeFetchException - - def list_artifact_files(self, project_id: str, artifact_hash: str) -> List[ArtifactFileData]: - raise NeptuneOfflineModeFetchException - - def get_float_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> FloatSeriesField: - raise NeptuneOfflineModeFetchException - - def get_string_series_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSeriesField: - raise NeptuneOfflineModeFetchException - - def get_string_set_attribute( - self, container_id: str, container_type: ContainerType, path: List[str] - ) -> StringSetField: - raise NeptuneOfflineModeFetchException - - def get_string_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - ) -> StringSeriesValues: - raise NeptuneOfflineModeFetchException - - def get_float_series_values( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - limit: int, - from_step: Optional[float] = None, - use_proto: Optional[bool] = None, - include_inherited: bool = True, - ) -> FloatSeriesValues: - raise NeptuneOfflineModeFetchException - - def get_image_series_values( - self, - container_id: str, - 
container_type: ContainerType, - path: List[str], - offset: int, - limit: int, - ) -> ImageSeriesValues: - raise NeptuneOfflineModeFetchException - - def download_file_series_by_index( - self, - container_id: str, - container_type: ContainerType, - path: List[str], - index: int, - destination: str, - progress_bar: Optional[ProgressBarType], - ): - raise NeptuneOfflineModeFetchException - - def list_fileset_files(self, attribute: List[str], container_id: str, path: str) -> List[FileEntry]: - raise NeptuneOfflineModeFetchException - - def get_fields_with_paths_filter( - self, container_id: str, container_type: ContainerType, paths: List[str], use_proto: Optional[bool] = None - ) -> List[Field]: - raise NeptuneOfflineModeFetchException - - def get_fields_definitions( - self, - container_id: str, - container_type: ContainerType, - use_proto: Optional[bool] = None, - ) -> List[FieldDefinition]: - raise NeptuneOfflineModeFetchException - - def search_leaderboard_entries( - self, - project_id: UniqueId, - types: Optional[Iterable[ContainerType]] = None, - query: Optional[NQLQuery] = None, - columns: Optional[Iterable[str]] = None, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - use_proto: Optional[bool] = None, - ) -> Generator[LeaderboardEntry, None, None]: - raise NeptuneOfflineModeFetchException - - def query_fields_definitions_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - experiment_ids_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - ) -> QueryFieldDefinitionsResult: - raise NeptuneOfflineModeFetchException - - def query_fields_within_project( - self, - project_id: QualifiedName, - field_name_regex: Optional[str] = None, - field_names_filter: Optional[List[str]] = None, - experiment_ids_filter: Optional[List[str]] = None, - experiment_names_filter: Optional[List[str]] = None, - next_page: Optional[NextPage] = None, - use_proto: Optional[bool] = None, - ) -> QueryFieldsResult: - raise NeptuneOfflineModeFetchException diff --git a/src/neptune/internal/backends/operation_api_name_visitor.py b/src/neptune/internal/backends/operation_api_name_visitor.py deleted file mode 100644 index 987674596..000000000 --- a/src/neptune/internal/backends/operation_api_name_visitor.py +++ /dev/null @@ -1,126 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
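`OfflineNeptuneBackend` above inherits the in-memory write path from `NeptuneBackendMock` and overrides every fetch to raise `NeptuneOfflineModeFetchException`, so offline runs can record metadata but never query it. A minimal, self-contained sketch of the same shape (stand-in names, not the real classes):

    class OfflineFetchError(Exception):
        """Raised when metadata is requested while running offline."""


    class InMemoryBackend:
        def __init__(self):
            self._store = {}

        def assign(self, path: str, value):  # writes stay local
            self._store[path] = value

        def fetch(self, path: str):  # reads serve from the local store
            return self._store[path]


    class OfflineBackend(InMemoryBackend):
        def fetch(self, path: str):  # offline mode forbids all reads
            raise OfflineFetchError(f"cannot fetch {path!r} in offline mode")


    backend = OfflineBackend()
    backend.assign("params/lr", 0.001)  # fine: recorded locally
    try:
        backend.fetch("params/lr")
    except OfflineFetchError as err:
        print(err)  # cannot fetch 'params/lr' in offline mode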
-# -__all__ = ["OperationApiNameVisitor"] - -from neptune.internal.exceptions import InternalClientError -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearArtifact, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) -from neptune.internal.operation_visitor import ( - OperationVisitor, - Ret, -) - - -class OperationApiNameVisitor(OperationVisitor[str]): - def visit(self, op: Operation) -> str: - return op.accept(self) - - def visit_assign_float(self, _: AssignFloat) -> str: - return "assignFloat" - - def visit_assign_int(self, _: AssignInt) -> str: - return "assignInt" - - def visit_assign_bool(self, _: AssignBool) -> str: - return "assignBool" - - def visit_assign_string(self, _: AssignString) -> str: - return "assignString" - - def visit_assign_datetime(self, _: AssignDatetime) -> Ret: - return "assignDatetime" - - def visit_upload_file(self, _: UploadFile) -> str: - raise InternalClientError("Specialized endpoint should be used to upload file attribute") - - def visit_upload_file_content(self, _: UploadFileContent) -> str: - raise InternalClientError("Specialized endpoint should be used to upload file attribute") - - def visit_upload_file_set(self, op: UploadFileSet) -> Ret: - raise InternalClientError("Specialized endpoints should be used to upload file set attribute") - - def visit_log_floats(self, _: LogFloats) -> str: - return "logFloats" - - def visit_log_strings(self, _: LogStrings) -> str: - return "logStrings" - - def visit_log_images(self, _: LogImages) -> str: - return "logImages" - - def visit_clear_float_log(self, _: ClearFloatLog) -> str: - return "clearFloatSeries" - - def visit_clear_string_log(self, _: ClearStringLog) -> str: - return "clearStringSeries" - - def visit_clear_image_log(self, _: ClearImageLog) -> str: - return "clearImageSeries" - - def visit_config_float_series(self, _: ConfigFloatSeries) -> str: - return "configFloatSeries" - - def visit_add_strings(self, _: AddStrings) -> str: - return "insertStrings" - - def visit_remove_strings(self, _: RemoveStrings) -> str: - return "removeStrings" - - def visit_delete_attribute(self, _: DeleteAttribute) -> str: - return "deleteAttribute" - - def visit_clear_string_set(self, _: ClearStringSet) -> str: - return "clearStringSet" - - def visit_delete_files(self, _: DeleteFiles) -> Ret: - return "deleteFiles" - - def visit_assign_artifact(self, _: AssignArtifact) -> Ret: - return "assignArtifact" - - def visit_track_files_to_artifact(self, _: TrackFilesToArtifact) -> Ret: - raise InternalClientError("Specialized endpoint should be used to track artifact files") - - def visit_clear_artifact(self, _: ClearArtifact) -> Ret: - return "clearArtifact" - - def visit_copy_attribute(self, _: CopyAttribute) -> Ret: - raise NotImplementedError("This operation is client-side only") diff --git a/src/neptune/internal/backends/operation_api_object_converter.py b/src/neptune/internal/backends/operation_api_object_converter.py deleted file mode 100644 index b182241aa..000000000 --- a/src/neptune/internal/backends/operation_api_object_converter.py +++ /dev/null @@ -1,157 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
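`OperationApiNameVisitor` above maps each client-side operation to the name of its REST endpoint, and deliberately raises `InternalClientError` for upload-style operations, which must go through specialized endpoints. A usage sketch against the pre-removal package (the `AssignFloat(path, value)` constructor shape is assumed from its dataclass definition, which is not shown in this patch):

    from neptune.internal.backends.operation_api_name_visitor import (
        OperationApiNameVisitor,
    )
    from neptune.internal.operation import AssignFloat

    # Double dispatch: visit() calls op.accept(self), which lands in
    # visit_assign_float() and returns the endpoint name.
    op = AssignFloat(["params", "lr"], 0.001)
    print(OperationApiNameVisitor().visit(op))  # assignFloat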
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["OperationApiObjectConverter"] - -from neptune.internal.exceptions import InternalClientError -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearArtifact, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) -from neptune.internal.operation_visitor import ( - OperationVisitor, - Ret, -) - - -class OperationApiObjectConverter(OperationVisitor[dict]): - def convert(self, op: Operation) -> dict: - return op.accept(self) - - def visit_assign_float(self, op: AssignFloat) -> dict: - return {"value": op.value} - - def visit_assign_int(self, op: AssignInt) -> dict: - return {"value": op.value} - - def visit_assign_bool(self, op: AssignBool) -> dict: - return {"value": op.value} - - def visit_assign_string(self, op: AssignString) -> dict: - return {"value": op.value} - - def visit_assign_datetime(self, op: AssignDatetime) -> Ret: - return {"valueMilliseconds": int(1000 * op.value.timestamp())} - - def visit_assign_artifact(self, op: AssignArtifact) -> dict: - return {"hash": op.hash} - - def visit_upload_file(self, _: UploadFile) -> dict: - raise InternalClientError("Specialized endpoint should be used to upload file attribute") - - def visit_upload_file_content(self, _: UploadFileContent) -> dict: - raise InternalClientError("Specialized endpoint should be used to upload file attribute") - - def visit_upload_file_set(self, op: UploadFileSet) -> Ret: - raise InternalClientError("Specialized endpoints should be used to upload file set attribute") - - def visit_log_floats(self, op: LogFloats) -> dict: - return { - "entries": [ - { - "value": value.value, - "step": value.step, - "timestampMilliseconds": int(value.ts * 1000), - } - for value in op.values - ] - } - - def visit_log_strings(self, op: LogStrings) -> dict: - return { - "entries": [ - { - "value": value.value, - "step": value.step, - "timestampMilliseconds": int(value.ts * 1000), - } - for value in op.values - ] - } - - def visit_log_images(self, op: LogImages) -> dict: - return { - "entries": [ - { - "value": { - "data": value.value.data, - "name": value.value.name, - "description": value.value.description, - }, - "step": value.step, - "timestampMilliseconds": int(value.ts * 1000), - } - for value in op.values - ] - } - - def visit_clear_float_log(self, _: ClearFloatLog) -> dict: - return {} - - def visit_clear_string_log(self, _: ClearStringLog) -> dict: - return {} - - def visit_clear_image_log(self, _: ClearImageLog) -> dict: - return {} - - def visit_config_float_series(self, op: ConfigFloatSeries) -> dict: - return {"min": op.min, "max": op.max, "unit": op.unit} - - def visit_add_strings(self, op: AddStrings) -> dict: - return {"values": 
list(op.values)} - - def visit_remove_strings(self, op: RemoveStrings) -> dict: - return {"values": list(op.values)} - - def visit_delete_attribute(self, _: DeleteAttribute) -> dict: - return {} - - def visit_clear_string_set(self, _: ClearStringSet) -> dict: - return {} - - def visit_delete_files(self, op: DeleteFiles) -> Ret: - return {"filePaths": list(op.file_paths)} - - def visit_track_files_to_artifact(self, op: TrackFilesToArtifact) -> dict: - raise InternalClientError("Specialized endpoint should be used to track artifact files") - - def visit_clear_artifact(self, _: ClearArtifact) -> Ret: - return {} - - def visit_copy_attribute(self, _: CopyAttribute) -> Ret: - raise NotImplementedError("This operation is client-side only") diff --git a/src/neptune/internal/backends/operations_preprocessor.py b/src/neptune/internal/backends/operations_preprocessor.py deleted file mode 100644 index 26a0e890a..000000000 --- a/src/neptune/internal/backends/operations_preprocessor.py +++ /dev/null @@ -1,371 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["OperationsPreprocessor"] - -import dataclasses -import typing -from enum import Enum -from typing import ( - Callable, - List, - TypeVar, -) - -from neptune.exceptions import MetadataInconsistency -from neptune.internal.exceptions import InternalClientError -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) -from neptune.internal.operation_visitor import OperationVisitor -from neptune.internal.utils.paths import path_to_str - -T = TypeVar("T") - - -class RequiresPreviousCompleted(Exception): - """Indicates that previous operations must be synchronized with the server before preprocessing the current one.""" - - -@dataclasses.dataclass -class AccumulatedOperations: - upload_operations: List[Operation] = dataclasses.field(default_factory=list) - artifact_operations: List[TrackFilesToArtifact] = dataclasses.field(default_factory=list) - other_operations: List[Operation] = dataclasses.field(default_factory=list) - - errors: List[MetadataInconsistency] = dataclasses.field(default_factory=list) - - -class OperationsPreprocessor: - def __init__(self): - self._accumulators: typing.Dict[str, "_OperationsAccumulator"] = dict() - self.processed_ops_count = 0 - - def process(self, operations: List[Operation]): - for op in operations: - try: - self._process_op(op) - self.processed_ops_count += 1 - except RequiresPreviousCompleted: - return - - def _process_op(self, op: Operation) -> "_OperationsAccumulator": - path_str = path_to_str(op.path) - target_acc = self._accumulators.setdefault(path_str,
_OperationsAccumulator(op.path)) - target_acc.visit(op) - return target_acc - - @staticmethod - def is_file_op(op: Operation): - return isinstance(op, (UploadFile, UploadFileContent, UploadFileSet)) - - @staticmethod - def is_artifact_op(op: Operation): - return isinstance(op, TrackFilesToArtifact) - - def get_operations(self) -> AccumulatedOperations: - result = AccumulatedOperations() - for _, acc in sorted(self._accumulators.items()): - acc: "_OperationsAccumulator" - for op in acc.get_operations(): - if self.is_artifact_op(op): - result.artifact_operations.append(op) - elif self.is_file_op(op): - result.upload_operations.append(op) - else: - result.other_operations.append(op) - result.errors.extend(acc.get_errors()) - - return result - - -class _DataType(Enum): - FLOAT = "Float" - INT = "Int" - BOOL = "Bool" - STRING = "String" - FILE = "File" - DATETIME = "Datetime" - FILE_SET = "File Set" - FLOAT_SERIES = "Float Series" - STRING_SERIES = "String Series" - IMAGE_SERIES = "Image Series" - STRING_SET = "String Set" - ARTIFACT = "Artifact" - - def is_file_op(self) -> bool: - return self in (self.FILE, self.FILE_SET) - - def is_artifact_op(self) -> bool: - return self in (self.ARTIFACT,) - - -class _OperationsAccumulator(OperationVisitor[None]): - def __init__(self, path: List[str]): - self._path = path - self._type: typing.Optional[_DataType] = None - self._delete_ops = [] - self._modify_ops = [] - self._config_ops = [] - self._errors = [] - - def get_operations(self) -> List[Operation]: - return self._delete_ops + self._modify_ops + self._config_ops - - def get_errors(self) -> List[MetadataInconsistency]: - return self._errors - - def _check_prerequisites(self, op: Operation): - if (OperationsPreprocessor.is_file_op(op) or OperationsPreprocessor.is_artifact_op(op)) and len( - self._delete_ops - ) > 0: - raise RequiresPreviousCompleted() - - def _process_modify_op( - self, - expected_type: _DataType, - op: Operation, - modifier: Callable[[List[Operation], Operation], List[Operation]], - ) -> None: - - if self._type and self._type != expected_type: - # This case should never happen, since data type inconsistencies are verified by the user API. - # So such operations should not appear in the queue without a delete operation between them. - # Still, we want to support this case to avoid unclear dependencies and assumptions. - self._errors.append( - MetadataInconsistency( - "Cannot perform {} operation on {}: Attribute is not a {}".format( - op.__class__.__name__, - path_to_str(self._path), - expected_type.value, - ) - ) - ) - else: - self._check_prerequisites(op) - self._type = expected_type - self._modify_ops = modifier(self._modify_ops, op) - - def _process_config_op(self, expected_type: _DataType, op: Operation) -> None: - - if self._type and self._type != expected_type: - # This case should never happen, since data type inconsistencies are verified by the user API. - # So such operations should not appear in the queue without a delete operation between them. - # Still, we want to support this case to avoid unclear dependencies and assumptions.
- self._errors.append( - MetadataInconsistency( - "Cannot perform {} operation on {}: Attribute is not a {}".format( - op.__class__.__name__, - path_to_str(self._path), - expected_type.value, - ) - ) - ) - else: - self._check_prerequisites(op) - self._type = expected_type - self._config_ops = [op] - - def visit_assign_float(self, op: AssignFloat) -> None: - self._process_modify_op(_DataType.FLOAT, op, self._assign_modifier()) - - def visit_assign_int(self, op: AssignInt) -> None: - self._process_modify_op(_DataType.INT, op, self._assign_modifier()) - - def visit_assign_bool(self, op: AssignBool) -> None: - self._process_modify_op(_DataType.BOOL, op, self._assign_modifier()) - - def visit_assign_string(self, op: AssignString) -> None: - self._process_modify_op(_DataType.STRING, op, self._assign_modifier()) - - def visit_assign_datetime(self, op: AssignDatetime) -> None: - self._process_modify_op(_DataType.DATETIME, op, self._assign_modifier()) - - def visit_upload_file(self, op: UploadFile) -> None: - self._process_modify_op(_DataType.FILE, op, self._assign_modifier()) - - def visit_upload_file_content(self, op: UploadFileContent) -> None: - self._process_modify_op(_DataType.FILE, op, self._assign_modifier()) - - def visit_assign_artifact(self, op: AssignArtifact) -> None: - self._process_modify_op(_DataType.ARTIFACT, op, self._assign_modifier()) - - def visit_upload_file_set(self, op: UploadFileSet) -> None: - if op.reset: - self._process_modify_op(_DataType.FILE_SET, op, self._assign_modifier()) - else: - self._process_modify_op(_DataType.FILE_SET, op, self._add_modifier()) - - def visit_log_floats(self, op: LogFloats) -> None: - self._process_modify_op( - _DataType.FLOAT_SERIES, - op, - self._log_modifier( - LogFloats, - ClearFloatLog, - lambda op1, op2: LogFloats(op1.path, op1.values + op2.values), - ), - ) - - def visit_log_strings(self, op: LogStrings) -> None: - self._process_modify_op( - _DataType.STRING_SERIES, - op, - self._log_modifier( - LogStrings, - ClearStringLog, - lambda op1, op2: LogStrings(op1.path, op1.values + op2.values), - ), - ) - - def visit_log_images(self, op: LogImages) -> None: - self._process_modify_op( - _DataType.IMAGE_SERIES, - op, - self._log_modifier( - LogImages, - ClearImageLog, - lambda op1, op2: LogImages(op1.path, op1.values + op2.values), - ), - ) - - def visit_clear_float_log(self, op: ClearFloatLog) -> None: - self._process_modify_op(_DataType.FLOAT_SERIES, op, self._clear_modifier()) - - def visit_clear_string_log(self, op: ClearStringLog) -> None: - self._process_modify_op(_DataType.STRING_SERIES, op, self._clear_modifier()) - - def visit_clear_image_log(self, op: ClearImageLog) -> None: - self._process_modify_op(_DataType.IMAGE_SERIES, op, self._clear_modifier()) - - def visit_add_strings(self, op: AddStrings) -> None: - self._process_modify_op(_DataType.STRING_SET, op, self._add_modifier()) - - def visit_clear_string_set(self, op: ClearStringSet) -> None: - self._process_modify_op(_DataType.STRING_SET, op, self._clear_modifier()) - - def visit_remove_strings(self, op: RemoveStrings) -> None: - self._process_modify_op(_DataType.STRING_SET, op, self._remove_modifier()) - - def visit_config_float_series(self, op: ConfigFloatSeries) -> None: - self._process_config_op(_DataType.FLOAT_SERIES, op) - - def visit_delete_files(self, op: DeleteFiles) -> None: - self._process_modify_op(_DataType.FILE_SET, op, self._add_modifier()) - - def visit_delete_attribute(self, op: DeleteAttribute) -> None: - if self._type: - if self._delete_ops: - # Keep existing 
delete operation and simply clear all modification operations after it - self._modify_ops = [] - self._config_ops = [] - self._type = None - else: - # This case is tricky. There was no delete operation, but some modifications were performed. - # We do not know if this attribute exists on the server side, and we do not want a delete op to fail. - # So we need to send a single modification before the delete to be sure the delete op is valid. - self._delete_ops = [self._modify_ops[0], op] - self._modify_ops = [] - self._config_ops = [] - self._type = None - else: - if self._delete_ops: - # Do nothing if there already is a delete operation - # and no other operations were performed after it - return - else: - # If the value has not been set locally yet and no delete operation was performed, - # simply perform a single delete operation. - self._delete_ops.append(op) - - @staticmethod - def _artifact_log_modifier( - ops: List[TrackFilesToArtifact], new_op: TrackFilesToArtifact - ) -> List[TrackFilesToArtifact]: - if len(ops) == 0: - return [new_op] - - # There should be exactly 1 operation; merge it with new_op - assert len(ops) == 1 - op_old = ops[0] - assert op_old.path == new_op.path - assert op_old.project_id == new_op.project_id - return [TrackFilesToArtifact(op_old.path, op_old.project_id, op_old.entries + new_op.entries)] - - def visit_track_files_to_artifact(self, op: TrackFilesToArtifact) -> None: - self._process_modify_op(_DataType.ARTIFACT, op, self._artifact_log_modifier) - - def visit_clear_artifact(self, op: ClearStringSet) -> None: - self._process_modify_op(_DataType.ARTIFACT, op, self._clear_modifier()) - - def visit_copy_attribute(self, op: CopyAttribute) -> None: - raise MetadataInconsistency("No CopyAttribute should reach accumulator") - - @staticmethod - def _assign_modifier(): - return lambda ops, new_op: [new_op] - - @staticmethod - def _clear_modifier(): - return lambda ops, new_op: [new_op] - - @staticmethod - def _log_modifier(log_op_class: type, clear_op_class: type, log_combine: Callable[[T, T], T]): - def modifier(ops, new_op): - if len(ops) == 0: - return [new_op] - elif len(ops) == 1 and isinstance(ops[0], log_op_class): - return [log_combine(ops[0], new_op)] - elif len(ops) == 1 and isinstance(ops[0], clear_op_class): - return [ops[0], new_op] - elif len(ops) == 2: - return [ops[0], log_combine(ops[1], new_op)] - else: - raise InternalClientError("Preprocessing operations failed: len(ops) == {}".format(len(ops))) - - return modifier - - @staticmethod - def _add_modifier(): - # We do not optimize this on the client side for now. It should not be a frequent operation. - return lambda ops, op: ops + [op] - - @staticmethod - def _remove_modifier(): - # We do not optimize this on the client side for now. It should not be a frequent operation. - return lambda ops, op: ops + [op] diff --git a/src/neptune/internal/backends/project_name_lookup.py b/src/neptune/internal/backends/project_name_lookup.py deleted file mode 100644 index d9c78b202..000000000 --- a/src/neptune/internal/backends/project_name_lookup.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["project_name_lookup"] - -import os -from typing import Optional - -from neptune.envs import PROJECT_ENV_NAME -from neptune.exceptions import NeptuneMissingProjectNameException -from neptune.internal.backends.api_model import Project -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.id_formats import QualifiedName -from neptune.internal.utils import verify_type -from neptune.internal.utils.logger import get_logger - -_logger = get_logger() - - -def project_name_lookup(backend: NeptuneBackend, name: Optional[QualifiedName] = None) -> Project: - verify_type("name", name, (str, type(None))) - - if not name: - name = os.getenv(PROJECT_ENV_NAME) - if not name: - available_workspaces = backend.get_available_workspaces() - available_projects = backend.get_available_projects() - - raise NeptuneMissingProjectNameException( - available_workspaces=available_workspaces, - available_projects=available_projects, - ) - - return backend.get_project(name) diff --git a/src/neptune/internal/backends/swagger_client_wrapper.py b/src/neptune/internal/backends/swagger_client_wrapper.py deleted file mode 100644 index 5fd2658e0..000000000 --- a/src/neptune/internal/backends/swagger_client_wrapper.py +++ /dev/null @@ -1,145 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["ApiMethodWrapper", "SwaggerClientWrapper"] - -from collections.abc import Callable -from typing import ( - Dict, - Optional, -) - -from bravado.client import SwaggerClient -from bravado.exception import HTTPError - -from neptune.api.requests_utils import ensure_json_response -from neptune.exceptions import ( - NeptuneFieldCountLimitExceedException, - NeptuneLimitExceedException, -) -from neptune.internal.exceptions import ( - NeptuneAuthTokenExpired, - WritingToArchivedProjectException, -) - - -class ApiMethodWrapper: - def __init__(self, api_method): - self._api_method = api_method - - @staticmethod - def handle_neptune_http_errors(response, exception: Optional[HTTPError] = None): - from neptune.management.exceptions import ( - ActiveProjectsLimitReachedException, - IncorrectIdentifierException, - ObjectNotFound, - ProjectKeyCollision, - ProjectKeyInvalid, - ProjectNameCollision, - ProjectNameInvalid, - ProjectPrivacyRestrictedException, - ProjectsLimitReached, - ) - - error_processors: Dict[str, Callable[[Dict], Exception]] = { - "ATTRIBUTES_PER_EXPERIMENT_LIMIT_EXCEEDED": lambda response_body: NeptuneFieldCountLimitExceedException( - limit=response_body.get("limit", ""), - container_type=response_body.get("experimentType", "object"), - identifier=response_body.get("experimentQualifiedName", ""), - ), - "AUTHORIZATION_TOKEN_EXPIRED": lambda _: NeptuneAuthTokenExpired(), - "EXPERIMENT_NOT_FOUND": lambda _: ObjectNotFound(), - "INCORRECT_IDENTIFIER": lambda response_body: IncorrectIdentifierException( - identifier=response_body.get("identifier", "") - ), - "LIMIT_OF_PROJECTS_REACHED": lambda _: ProjectsLimitReached(), - "PROJECT_KEY_COLLISION": lambda response_body: ProjectKeyCollision( - key=response_body.get("key", "") - ), - "PROJECT_KEY_INVALID": lambda response_body: ProjectKeyInvalid( - key=response_body.get("key", ""), - reason=response_body.get("reason", "Unknown reason"), - ), - "PROJECT_NAME_COLLISION": lambda response_body: ProjectNameCollision( - key=response_body.get("key", "") - ), - "PROJECT_NAME_INVALID": lambda response_body: ProjectNameInvalid( - name=response_body.get("name", "") - ), - "VISIBILITY_RESTRICTED": lambda response_body: ProjectPrivacyRestrictedException( - requested=response_body.get("requestedValue"), - allowed=response_body.get("allowedValues"), - ), - "WORKSPACE_IN_READ_ONLY_MODE": lambda response_body: NeptuneLimitExceedException( - reason=response_body.get("title", "Unknown reason") - ), - "PROJECT_LIMITS_EXCEEDED": lambda response_body: NeptuneLimitExceedException( - reason=response_body.get("title", "Unknown reason") - ), - "LIMIT_OF_ACTIVE_PROJECTS_REACHED": lambda response_body: ActiveProjectsLimitReachedException( - currentQuota=response_body.get("currentQuota", "") - ), - "WRITE_ACCESS_DENIED_TO_ARCHIVED_PROJECT": lambda _: WritingToArchivedProjectException(), - } - - body = ensure_json_response(response) - error_type: Optional[str] = body.get("errorType") - error_processor = error_processors.get(error_type) - if error_processor: - if exception: - raise error_processor(body) from exception - raise error_processor(body) - - if exception: - raise exception - - def __call__(self, *args, **kwargs): - try: - future = self._api_method(*args, **kwargs) - return FinishedApiResponseFuture(future.response()) # wait synchronously - except HTTPError as e: - self.handle_neptune_http_errors(e.response, exception=e) - - def __getattr__(self, item): - return getattr(self._api_method, item) - - -class ApiWrapper: - def __init__(self, api_obj): - 
self._api_obj = api_obj - - def __getattr__(self, item): - return ApiMethodWrapper(getattr(self._api_obj, item)) - - -class FinishedApiResponseFuture: - def __init__(self, response): - self._response = response - - def response(self): - return self._response - - -class SwaggerClientWrapper: - def __init__(self, swagger_client: SwaggerClient): - self._swagger_client = swagger_client - self.api = ApiWrapper(swagger_client.api) - self.swagger_spec = swagger_client.swagger_spec - - # For test purposes - def __eq__(self, other): - if isinstance(other, SwaggerClientWrapper): - return self._swagger_client == other._swagger_client - return False diff --git a/src/neptune/internal/backends/utils.py b/src/neptune/internal/backends/utils.py deleted file mode 100644 index 305e6f717..000000000 --- a/src/neptune/internal/backends/utils.py +++ /dev/null @@ -1,483 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "verify_host_resolution", - "create_swagger_client", - "verify_client_version", - "update_session_proxies", - "build_operation_url", - "handle_server_raw_response_messages", - "NeptuneResponseAdapter", - "MissingApiClient", - "cache", - "ssl_verify", - "parse_validation_errors", - "ExecuteOperationsBatchingManager", - "which_progress_bar", - "construct_progress_bar", - "with_api_exceptions_handler", -] - -import dataclasses -import itertools -import os -import socket -import time -from functools import ( - lru_cache, - wraps, -) -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - List, - Mapping, - Optional, - Text, - Type, -) -from urllib.parse import ( - urljoin, - urlparse, -) - -import requests -import urllib3 -from bravado.client import SwaggerClient -from bravado.exception import ( - BravadoConnectionError, - BravadoTimeoutError, - HTTPBadGateway, - HTTPClientError, - HTTPError, - HTTPForbidden, - HTTPGatewayTimeout, - HTTPInternalServerError, - HTTPRequestTimeout, - HTTPServiceUnavailable, - HTTPTooManyRequests, - HTTPUnauthorized, -) -from bravado.http_client import HttpClient -from bravado.requests_client import RequestsResponseAdapter -from bravado_core.formatter import SwaggerFormat -from bravado_core.util import RecursiveCallException -from packaging.version import Version -from requests import ( - Response, - Session, -) -from requests.exceptions import ChunkedEncodingError -from urllib3.exceptions import NewConnectionError - -from neptune.envs import NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE -from neptune.exceptions import ( - CannotResolveHostname, - MetadataInconsistency, - NeptuneClientUpgradeRequiredError, - NeptuneFeatureNotAvailableException, -) -from neptune.internal.backends.api_model import ClientConfig -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper -from neptune.internal.envs import NEPTUNE_RETRIES_TIMEOUT_ENV -from neptune.internal.exceptions import ( - ClientHttpError, - Forbidden, - NeptuneAuthTokenExpired, - NeptuneConnectionLostException, - 
NeptuneInvalidApiTokenException, - NeptuneSSLVerificationError, - Unauthorized, -) -from neptune.internal.operation import ( - CopyAttribute, - Operation, -) -from neptune.internal.utils import replace_patch_version -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.utils import reset_internal_ssl_state -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) -from neptune.typing import ( - ProgressBarCallback, - ProgressBarType, -) -from neptune.utils import ( - NullProgressBar, - TqdmProgressBar, -) - -logger = get_logger() - -if TYPE_CHECKING: - from neptune.internal.backends.neptune_backend import NeptuneBackend - - -MAX_RETRY_TIME = 30 -MAX_RETRY_MULTIPLIER = 10 -retries_timeout = int(os.getenv(NEPTUNE_RETRIES_TIMEOUT_ENV, "60")) - - -def get_retry_from_headers_or_default(headers, retry_count): - try: - return ( - int(headers["retry-after"][0]) if "retry-after" in headers else 2 ** min(MAX_RETRY_MULTIPLIER, retry_count) - ) - except Exception: - return min(2 ** min(MAX_RETRY_MULTIPLIER, retry_count), MAX_RETRY_TIME) - - -def with_api_exceptions_handler(func): - def wrapper(*args, **kwargs): - ssl_error_occurred = False - last_exception = None - start_time = time.monotonic() - for retry in itertools.count(0): - if time.monotonic() - start_time > retries_timeout: - break - - try: - return func(*args, **kwargs) - except requests.exceptions.InvalidHeader as e: - if "X-Neptune-Api-Token" in e.args[0]: - raise NeptuneInvalidApiTokenException() - raise - except requests.exceptions.SSLError as e: - """ - OpenSSL's internal random number generator does not properly handle forked processes. - Applications must change the PRNG state of the parent process - if they use any SSL feature with os.fork(). - Any successful call of RAND_add(), RAND_bytes() or RAND_pseudo_bytes() is sufficient. - https://docs.python.org/3/library/ssl.html#multi-processing - On Linux it looks like it does not help much, but it does not break anything either. - And a single retry seems to solve the issue.
- """ - if not ssl_error_occurred: - ssl_error_occurred = True - reset_internal_ssl_state() - continue - - if "CertificateError" in str(e.__context__): - raise NeptuneSSLVerificationError() from e - else: - time.sleep(min(2 ** min(MAX_RETRY_MULTIPLIER, retry), MAX_RETRY_TIME)) - last_exception = e - continue - except ( - BravadoConnectionError, - BravadoTimeoutError, - requests.exceptions.ConnectionError, - requests.exceptions.Timeout, - HTTPRequestTimeout, - HTTPServiceUnavailable, - HTTPGatewayTimeout, - HTTPBadGateway, - HTTPInternalServerError, - NewConnectionError, - ChunkedEncodingError, - RecursiveCallException, - ) as e: - time.sleep(min(2 ** min(MAX_RETRY_MULTIPLIER, retry), MAX_RETRY_TIME)) - last_exception = e - continue - except HTTPTooManyRequests as e: - wait_time = get_retry_from_headers_or_default(e.response.headers, retry) - time.sleep(wait_time) - last_exception = e - continue - except NeptuneAuthTokenExpired as e: - last_exception = e - continue - except HTTPUnauthorized: - raise Unauthorized() - except HTTPForbidden: - raise Forbidden() - except HTTPClientError as e: - raise ClientHttpError(e.status_code, e.response.text) from e - except requests.exceptions.RequestException as e: - if e.response is None: - raise - status_code = e.response.status_code - if status_code in ( - HTTPRequestTimeout.status_code, - HTTPBadGateway.status_code, - HTTPServiceUnavailable.status_code, - HTTPGatewayTimeout.status_code, - HTTPInternalServerError.status_code, - ): - time.sleep(min(2 ** min(MAX_RETRY_MULTIPLIER, retry), MAX_RETRY_TIME)) - last_exception = e - continue - elif status_code == HTTPTooManyRequests.status_code: - wait_time = get_retry_from_headers_or_default(e.response.headers, retry) - time.sleep(wait_time) - last_exception = e - continue - elif status_code == HTTPUnauthorized.status_code: - raise Unauthorized() - elif status_code == HTTPForbidden.status_code: - raise Forbidden() - elif 400 <= status_code < 500: - raise ClientHttpError(status_code, e.response.text) from e - else: - raise - raise NeptuneConnectionLostException(last_exception) from last_exception - - return wrapper - - -@lru_cache(maxsize=None, typed=True) -def verify_host_resolution(url: str) -> None: - host = urlparse(url).netloc.split(":")[0] - try: - socket.gethostbyname(host) - except socket.gaierror: - raise CannotResolveHostname(host) - - -uuid_format = SwaggerFormat( - format="uuid", - to_python=lambda x: x, - to_wire=lambda x: x, - validate=lambda x: None, - description="", -) - - -@with_api_exceptions_handler -def create_swagger_client(url: str, http_client: HttpClient) -> SwaggerClient: - return SwaggerClient.from_url( - url, - config=dict( - validate_swagger_spec=False, - validate_requests=False, - validate_responses=False, - formats=[uuid_format], - ), - http_client=http_client, - ) - - -def verify_client_version(client_config: ClientConfig, version: Version): - base_version = Version(f"{version.major}.{version.minor}.{version.micro}") - version_with_patch_0 = Version(replace_patch_version(str(version))) - - min_compatible = client_config.version_info.min_compatible - max_compatible = client_config.version_info.max_compatible - min_recommended = client_config.version_info.min_recommended - - if min_compatible and min_compatible > base_version: - raise NeptuneClientUpgradeRequiredError(version, min_version=client_config.version_info.min_compatible) - - if max_compatible and max_compatible < version_with_patch_0: - raise NeptuneClientUpgradeRequiredError(version, 
max_version=client_config.version_info.max_compatible) - - if min_recommended and min_recommended > version: - logger.warning( - "WARNING: Your version of the Neptune client library (%s) is deprecated," - " and soon will no longer be supported by the Neptune server." - " We recommend upgrading to at least version %s.", - version, - min_recommended, - ) - - -def update_session_proxies(session: Session, proxies: Optional[Dict[str, str]]): - if proxies: - try: - session.proxies.update(proxies) - except (TypeError, ValueError): - raise ValueError(f"Wrong proxies format: {proxies}") - - -def build_operation_url(base_api: str, operation_url: str) -> str: - if "://" not in base_api: - base_api = f"https://{base_api}" - - return urljoin(base=base_api, url=operation_url) - - -# TODO print in color once colored exceptions are added -def handle_server_raw_response_messages(response: Response): - try: - info = response.headers.get("X-Server-Info") - if info: - logger.info(info) - warning = response.headers.get("X-Server-Warning") - if warning: - logger.warning(warning) - error = response.headers.get("X-Server-Error") - if error: - logger.error(error) - return response - except Exception: - # any issues with printing server messages should not cause code to fail - return response - - -# TODO print in color once colored exceptions are added -class NeptuneResponseAdapter(RequestsResponseAdapter): - @property - def raw_bytes(self) -> bytes: - self._handle_response() - return super().raw_bytes - - @property - def text(self) -> Text: - self._handle_response() - return super().text - - def json(self, **kwargs) -> Mapping[Text, Any]: - self._handle_response() - return super().json(**kwargs) - - def _handle_response(self): - try: - info = self._delegate.headers.get("X-Server-Info") - if info: - logger.info(info) - warning = self._delegate.headers.get("X-Server-Warning") - if warning: - logger.warning(warning) - error = self._delegate.headers.get("X-Server-Error") - if error: - logger.error(error) - except Exception: - # any issues with printing server messages should not cause code to fail - pass - - -class MissingApiClient(SwaggerClientWrapper): - """catch-all class to gracefully handle calls to unavailable API""" - - def __init__(self, feature_name: str): - self.feature_name = feature_name - - def __getattr__(self, item): - raise NeptuneFeatureNotAvailableException(missing_feature=self.feature_name) - - -# https://stackoverflow.com/a/44776960 -def cache(func): - """ - Transform mutable dictionary into immutable before call to lru_cache - """ - - class HDict(dict): - def __hash__(self): - return hash(frozenset(self.items())) - - func = lru_cache(maxsize=None, typed=True)(func) - - @wraps(func) - def wrapper(*args, **kwargs): - args = tuple([HDict(arg) if isinstance(arg, dict) else arg for arg in args]) - kwargs = {k: HDict(v) if isinstance(v, dict) else v for k, v in kwargs.items()} - return func(*args, **kwargs) - - wrapper.cache_clear = func.cache_clear - return wrapper - - -def ssl_verify(): - if os.getenv(NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE): - urllib3.disable_warnings() - return False - - return True - - -def parse_validation_errors(error: HTTPError) -> Dict[str, str]: - return { - f"{error_description.get('errorCode').get('name')}": error_description.get("context", "") - for validation_error in error.swagger_result.validationErrors - for error_description in validation_error.get("errors") - } - - -@dataclasses.dataclass -class OperationsBatch: - operations: List[Operation] = 
dataclasses.field(default_factory=list) - errors: List[MetadataInconsistency] = dataclasses.field(default_factory=list) - dropped_operations_count: int = 0 - - -class ExecuteOperationsBatchingManager: - def __init__(self, backend: "NeptuneBackend"): - self._backend = backend - - def get_batch(self, ops: Iterable[Operation]) -> OperationsBatch: - result = OperationsBatch() - for op in ops: - if isinstance(op, CopyAttribute): - if not result.operations: - try: - # CopyAttribute can be at the start of a batch - result.operations.append(op.resolve(self._backend)) - except MetadataInconsistency as e: - result.errors.append(e) - result.dropped_operations_count += 1 - else: - # cannot have CopyAttribute after any other op in a batch - break - else: - result.operations.append(op) - - return result - - -def _check_if_tqdm_installed() -> bool: - try: - import tqdm # noqa: F401 - - return True - except ImportError: # tqdm not installed - return False - - -def which_progress_bar(progress_bar: Optional[ProgressBarType]) -> Type[ProgressBarCallback]: - if isinstance(progress_bar, type) and issubclass( - progress_bar, ProgressBarCallback - ): # return whatever the user gave us - return progress_bar - - if not isinstance(progress_bar, bool) and progress_bar is not None: - raise TypeError(f"progress_bar should be None, bool or ProgressBarCallback, got {type(progress_bar).__name__}") - - if progress_bar or progress_bar is None: - tqdm_available = _check_if_tqdm_installed() - - if not tqdm_available: - warn_once( - "To use the default progress bar, please install tqdm: pip install tqdm", - exception=NeptuneWarning, - ) - return NullProgressBar - return TqdmProgressBar - - return NullProgressBar - - -def construct_progress_bar( - progress_bar: Optional[ProgressBarType], - description: str, -) -> ProgressBarCallback: - progress_bar_type = which_progress_bar(progress_bar) - return progress_bar_type(description=description) diff --git a/src/neptune/internal/backgroud_job_list.py b/src/neptune/internal/backgroud_job_list.py deleted file mode 100644 index e284c8b48..000000000 --- a/src/neptune/internal/backgroud_job_list.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["BackgroundJobList"] - -import time -from typing import ( - TYPE_CHECKING, - List, - Optional, -) - -from neptune.internal.background_job import BackgroundJob - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - - -class BackgroundJobList(BackgroundJob): - def __init__(self, jobs: List[BackgroundJob]): - self._jobs = jobs - - def start(self, container: "NeptuneObject"): - for job in self._jobs: - job.start(container) - - def stop(self): - for job in self._jobs: - job.stop() - - def join(self, seconds: Optional[float] = None): - ts = time.time() - for job in self._jobs: - sec_left = None if seconds is None else seconds - (time.time() - ts) - job.join(sec_left) - - def pause(self): - for job in self._jobs: - job.pause() - - def resume(self): - for job in self._jobs: - job.resume() diff --git a/src/neptune/internal/background_job.py b/src/neptune/internal/background_job.py deleted file mode 100644 index 1351d15da..000000000 --- a/src/neptune/internal/background_job.py +++ /dev/null @@ -1,47 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["BackgroundJob"] - -import abc -from typing import ( - TYPE_CHECKING, - Optional, -) - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - - -class BackgroundJob: - @abc.abstractmethod - def start(self, container: "NeptuneObject"): - pass - - @abc.abstractmethod - def stop(self): - pass - - @abc.abstractmethod - def join(self, seconds: Optional[float] = None): - pass - - @abc.abstractmethod - def pause(self): - pass - - @abc.abstractmethod - def resume(self): - pass diff --git a/src/neptune/internal/constants.py b/src/neptune/internal/constants.py deleted file mode 100644 index 082e9a4c1..000000000 --- a/src/neptune/internal/constants.py +++ /dev/null @@ -1,23 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -__all__ = ["ANONYMOUS_API_TOKEN_CONTENT", "DO_NOT_TRACK_GIT_REPOSITORY"] - -ANONYMOUS_API_TOKEN_CONTENT = ( - "eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vYXBwLm5lcHR1bmUuYWkiLCJhcGlfdXJsIjoiaHR0cHM6Ly9hcHAubmVwdHVuZS" - "5haSIsImFwaV9rZXkiOiJiNzA2YmM4Zi03NmY5LTRjMmUtOTM5ZC00YmEwMzZmOTMyZTQifQo=" -) -DO_NOT_TRACK_GIT_REPOSITORY = "DO_NOT_TRACK_GIT_REPOSITORY" diff --git a/src/neptune/internal/container_structure.py b/src/neptune/internal/container_structure.py deleted file mode 100644 index ebb01884b..000000000 --- a/src/neptune/internal/container_structure.py +++ /dev/null @@ -1,127 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ContainerStructure"] - -from collections import deque -from typing import ( - Callable, - Generic, - List, - Optional, - TypeVar, - Union, -) - -from neptune.exceptions import MetadataInconsistency -from neptune.internal.utils.paths import path_to_str - -T = TypeVar("T") -Node = TypeVar("Node") - - -def _default_node_factory(path): - return {} - - -class ContainerStructure(Generic[T, Node]): - def __init__(self, node_factory: Optional[Callable[[List[str]], Node]] = None): - if node_factory is None: - node_factory = _default_node_factory - - self._structure = node_factory(path=[]) - self._node_factory = node_factory - self._node_type = type(self._structure) - - def get_structure(self) -> Node: - return self._structure - - def _iterate_node(self, node, path_prefix: List[str]): - """this iterates in BFS order in order to more meaningful suggestions before cutoff""" - nodes_queue = deque([(node, path_prefix)]) - while nodes_queue: - node, prefix = nodes_queue.popleft() - for key, value in node.items(): - if not isinstance(value, self._node_type): - yield prefix + [key] - else: - nodes_queue.append((value, prefix + [key])) - - def iterate_subpaths(self, path_prefix: List[str]): - root = self.get(path_prefix) - for path in self._iterate_node(root or {}, path_prefix): - yield path_to_str(path) - - def get(self, path: List[str]) -> Union[T, Node, None]: - ref = self._structure - - for index, part in enumerate(path): - if not isinstance(ref, self._node_type): - raise MetadataInconsistency( - "Cannot access path '{}': '{}' is already defined as an attribute, " - "not a namespace".format(path_to_str(path), path_to_str(path[:index])) - ) - if part not in ref: - return None - ref = ref[part] - - return ref - - def set(self, path: List[str], attr: T) -> None: - ref = self._structure - location, attribute_name = path[:-1], path[-1] - - for idx, part in enumerate(location): - if part not in ref: - ref[part] = self._node_factory(location[: idx + 1]) - ref = ref[part] - if not isinstance(ref, self._node_type): - raise MetadataInconsistency( - "Cannot access path '{}': '{}' is already defined as an attribute, " - "not a namespace".format(path_to_str(path), part) - ) - - if attribute_name in ref and isinstance(ref[attribute_name], self._node_type): - if isinstance(attr, self._node_type): - # in-between nodes are 
auto-created, so ignoring them is OK unless we want to change the type - return - raise MetadataInconsistency("Cannot set attribute '{}'. It's a namespace".format(path_to_str(path))) - - ref[attribute_name] = attr - - def pop(self, path: List[str]) -> None: - self._pop_impl(self._structure, path, path) - - def _pop_impl(self, ref, sub_path: List[str], attr_path: List[str]): - if not sub_path: - return - - head, tail = sub_path[0], sub_path[1:] - if head not in ref: - raise MetadataInconsistency("Cannot delete {}. Attribute not found.".format(path_to_str(attr_path))) - - if not tail: - if isinstance(ref[head], self._node_type): - raise MetadataInconsistency( - "Cannot delete {}. It's a namespace, not an attribute.".format(path_to_str(attr_path)) - ) - del ref[head] - else: - self._pop_impl(ref[head], tail, attr_path) - if not ref[head]: - del ref[head] - - def clear(self): - self._structure.clear() diff --git a/src/neptune/internal/container_type.py b/src/neptune/internal/container_type.py deleted file mode 100644 index f13ee81b8..000000000 --- a/src/neptune/internal/container_type.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ContainerType"] - -import enum - -from neptune.internal.id_formats import UniqueId - - -class ContainerType(str, enum.Enum): - RUN = "run" - PROJECT = "project" - MODEL = "model" - MODEL_VERSION = "model_version" - - def to_api(self) -> str: - if self == ContainerType.MODEL_VERSION: - return "modelVersion" - else: - return self.value - - @staticmethod - def from_api(api_type: str) -> "ContainerType": - if api_type == "modelVersion": - return ContainerType.MODEL_VERSION - else: - return ContainerType(api_type) - - def create_dir_name(self, container_id: UniqueId) -> str: - return f"{self.value}__{container_id}" diff --git a/src/neptune/internal/credentials.py b/src/neptune/internal/credentials.py deleted file mode 100644 index a8dfc0551..000000000 --- a/src/neptune/internal/credentials.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -__all__ = ["Credentials"] - -import base64 -import json -import os -from dataclasses import dataclass -from typing import ( - Dict, - Optional, -) - -from neptune.constants import ANONYMOUS_API_TOKEN -from neptune.exceptions import NeptuneMissingApiTokenException -from neptune.internal.constants import ANONYMOUS_API_TOKEN_CONTENT -from neptune.internal.envs import API_TOKEN_ENV_NAME -from neptune.internal.exceptions import NeptuneInvalidApiTokenException - - -@dataclass(frozen=True) -class Credentials: - api_token: str - token_origin_address: str - api_url_opt: str - - @classmethod - def from_token(cls, api_token: Optional[str] = None) -> "Credentials": - if api_token is None: - api_token = os.getenv(API_TOKEN_ENV_NAME) - - if api_token == ANONYMOUS_API_TOKEN: - api_token = ANONYMOUS_API_TOKEN_CONTENT - - if api_token is None: - raise NeptuneMissingApiTokenException() - - api_token = api_token.strip() - token_dict = Credentials._api_token_to_dict(api_token) - # TODO: Consider renaming 'api_address' (breaking backward compatibility) - if "api_address" not in token_dict: - raise NeptuneInvalidApiTokenException() - token_origin_address = token_dict["api_address"] - api_url = token_dict["api_url"] if "api_url" in token_dict else None - - return Credentials( - api_token=api_token, - token_origin_address=token_origin_address, - api_url_opt=api_url, - ) - - @staticmethod - def _api_token_to_dict(api_token: str) -> Dict[str, str]: - try: - return json.loads(base64.b64decode(api_token.encode()).decode("utf-8")) - except Exception: - raise NeptuneInvalidApiTokenException() diff --git a/src/neptune/internal/envs.py b/src/neptune/internal/envs.py deleted file mode 100644 index 9029e0017..000000000 --- a/src/neptune/internal/envs.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -API_TOKEN_ENV_NAME = "NEPTUNE_API_TOKEN" - -NEPTUNE_RETRIES_TIMEOUT_ENV = "NEPTUNE_RETRIES_TIMEOUT" - -PROJECT_ENV_NAME = "NEPTUNE_PROJECT" - -NOTEBOOK_ID_ENV_NAME = "NEPTUNE_NOTEBOOK_ID" - -NOTEBOOK_PATH_ENV_NAME = "NEPTUNE_NOTEBOOK_PATH" - -BACKEND = "NEPTUNE_BACKEND" diff --git a/src/neptune/internal/exceptions.py b/src/neptune/internal/exceptions.py deleted file mode 100644 index 0e7f1d771..000000000 --- a/src/neptune/internal/exceptions.py +++ /dev/null @@ -1,430 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import platform -from typing import ( - Any, - Optional, -) - -from neptune.internal.envs import ( - API_TOKEN_ENV_NAME, - PROJECT_ENV_NAME, -) - -UNIX_STYLES = { - "h1": "\033[95m", - "h2": "\033[94m", - "blue": "\033[94m", - "python": "\033[96m", - "bash": "\033[95m", - "warning": "\033[93m", - "correct": "\033[92m", - "fail": "\033[91m", - "bold": "\033[1m", - "underline": "\033[4m", - "end": "\033[0m", -} - -WINDOWS_STYLES = { - "h1": "", - "h2": "", - "python": "", - "bash": "", - "warning": "", - "correct": "", - "fail": "", - "bold": "", - "underline": "", - "end": "", -} - -EMPTY_STYLES = { - "h1": "", - "h2": "", - "python": "", - "bash": "", - "warning": "", - "correct": "", - "fail": "", - "bold": "", - "underline": "", - "end": "", -} - -if platform.system() in ["Linux", "Darwin"]: - STYLES = UNIX_STYLES -elif platform.system() == "Windows": - STYLES = WINDOWS_STYLES -else: - STYLES = EMPTY_STYLES - - -class NeptuneException(Exception): - def __eq__(self, other: Any) -> bool: - if type(other) is type(self): - return super().__eq__(other) and str(self).__eq__(str(other)) - else: - return False - - def __hash__(self) -> int: - return hash((super().__hash__(), str(self))) - - -class NeptuneInvalidApiTokenException(NeptuneException): - def __init__(self) -> None: - message = """ -{h1} -----NeptuneInvalidApiTokenException------------------------------------------------ -{end} -The provided API token is invalid. -Make sure you copied and provided your API token correctly. - -You can get it or check if it is correct here: - - https://app.neptune.ai/get_my_api_token - -There are two options to add it: - - specify it in your code - - set it as an environment variable in your operating system. - -{h2}CODE{end} -Pass the token to the {bold}init_run(){end} function via the {bold}api_token{end} argument: - {python}neptune.init_run(project='WORKSPACE_NAME/PROJECT_NAME', api_token='YOUR_API_TOKEN'){end} - -{h2}ENVIRONMENT VARIABLE{end} {correct}(Recommended option){end} -or export or set an environment variable depending on your operating system: - - {correct}Linux/Unix{end} - In your terminal run: - {bash}export {env_api_token}="YOUR_API_TOKEN"{end} - - {correct}Windows{end} - In your CMD run: - {bash}set {env_api_token}="YOUR_API_TOKEN"{end} - -and skip the {bold}api_token{end} argument of the {bold}init_run(){end} function: - {python}neptune.init_run(project='WORKSPACE_NAME/PROJECT_NAME'){end} - -You may also want to check the following docs page: - - https://docs.neptune.ai/setup/setting_api_token/ - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(env_api_token=API_TOKEN_ENV_NAME, **STYLES)) - - -class UploadedFileChanged(NeptuneException): - def __init__(self, filename: str) -> None: - super().__init__("File {} changed during upload, restarting upload.".format(filename)) - - -class InternalClientError(NeptuneException): - def __init__(self, msg: str) -> None: - message = """ -{h1} -----InternalClientError----------------------------------------------------------------------- -{end} -The Neptune client library encountered an unexpected internal error: -{msg} - -Please contact Neptune support. 
- -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(msg=msg, **STYLES)) - - -class ClientHttpError(NeptuneException): - def __init__(self, status: str, response: str) -> None: - self.status = status - self.response = response - message = """ -{h1} -----ClientHttpError----------------------------------------------------------------------- -{end} -The Neptune server returned the status {fail}{status}{end}. - -The server response was: -{fail}{response}{end} - -Verify the correctness of your call or contact Neptune support. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(status=status, response=response, **STYLES)) - - -class NeptuneApiException(NeptuneException): - pass - - -class Forbidden(NeptuneApiException): - def __init__(self) -> None: - message = """ -{h1} -----Forbidden----------------------------------------------------------------------- -{end} -You don't have permission to access the given resource. - - - Verify that your API token is correct. - See: https://app.neptune.ai/get_my_api_token - - - Verify that the provided project name is correct. - The correct project name should look like this: {correct}WORKSPACE_NAME/PROJECT_NAME{end} - It has two parts: - - {correct}WORKSPACE_NAME{end}: can be your username or your organization name - - {correct}PROJECT_NAME{end}: the name specified for the project - - - Ask your organization administrator to grant you the necessary privileges to the project. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - super().__init__(message.format(**STYLES)) - - -class Unauthorized(NeptuneApiException): - def __init__(self, msg: Optional[str] = None) -> None: - default_message = """ -{h1} -----Unauthorized----------------------------------------------------------------------- -{end} -You don't have permission to access the given resource. - - - Verify that your API token is correct. - See: https://app.neptune.ai/get_my_api_token - - - Verify that the provided project name is correct. - The correct project name should look like this: {correct}WORKSPACE_NAME/PROJECT_NAME{end} - It has two parts: - - {correct}WORKSPACE_NAME{end}: can be your username or your organization name - - {correct}PROJECT_NAME{end}: the name specified for the project - - - Ask your organization administrator to grant you the necessary privileges to the project. - -{correct}Need help?{end}-> https://docs.neptune.ai/getting_help -""" - message = msg if msg is not None else default_message - super().__init__(message.format(**STYLES)) - - -class NeptuneAuthTokenExpired(Unauthorized): - def __init__(self) -> None: - super().__init__("Authorization token expired") - - -class InternalServerError(NeptuneApiException): - def __init__(self, response: str) -> None: - message = """ -{h1} -----InternalServerError----------------------------------------------------------------------- -{end} -The Neptune client library encountered an unexpected internal server error. - -The server response was: -{fail}{response}{end} - -Please try again later or contact Neptune support. 
-
-{correct}Need help?{end}-> https://docs.neptune.ai/getting_help
-"""
-        super().__init__(message.format(response=response, **STYLES))
-
-
-class NeptuneConnectionLostException(NeptuneException):
-    def __init__(self, cause: Exception) -> None:
-        self.cause = cause
-        message = """
-{h1}
-----NeptuneConnectionLostException---------------------------------------------------------
-{end}
-The connection to the Neptune server was lost.
-If you are using the asynchronous (default) connection mode, Neptune continues to locally track your metadata and continuously tries to re-establish a connection to the Neptune servers.
-If the connection is not re-established, you can upload your data later with the Neptune Command Line Interface tool:
-    {bash}neptune sync -p workspace_name/project_name{end}
-
-What should I do?
-    - Check if your computer is connected to the internet.
-    - If your connection is unstable, consider working in offline mode:
-        {python}run = neptune.init_run(mode="offline"){end}
-
-You can find detailed instructions on the following doc pages:
-    - https://docs.neptune.ai/api/connection_modes/#offline-mode
-    - https://docs.neptune.ai/api/neptune_sync/
-
-You may also want to check the following docs page:
-    - https://docs.neptune.ai/api/connection_modes/#connectivity-issues
-
-{correct}Need help?{end}-> https://docs.neptune.ai/getting_help
-"""  # noqa: E501
-        super().__init__(message.format(**STYLES))
-
-
-class NeptuneSSLVerificationError(NeptuneException):
-    def __init__(self) -> None:
-        message = """
-{h1}
-----NeptuneSSLVerificationError-----------------------------------------------------------------------
-{end}
-
-The Neptune client was unable to verify your SSL Certificate.
-
-{bold}What could have gone wrong?{end}
-    - You are behind a proxy that inspects traffic to Neptune servers.
-        - Contact your network administrator
-    - The SSL/TLS certificate of your on-premises installation is not recognized due to a custom Certificate Authority (CA).
-        - To check, run the following command in your terminal:
-            {bash}curl https://<your_domain>/api/backend/echo {end}
-        - Where <your_domain> is the address that you use to access the Neptune app, such as abc.com
-        - Contact your network administrator if you get the following output:
-            {fail}"curl: (60) server certificate verification failed..."{end}
-    - Your machine software is outdated.
-        - Minimal OS requirements:
-            - Windows >= XP SP3
-            - macOS >= 10.12.1
-            - Ubuntu >= 12.04
-            - Debian >= 8
-
-{bold}What can I do?{end}
-You can manually configure Neptune to skip all SSL checks. To do that,
-set the NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE environment variable to 'TRUE'.
-{bold}Note: This might mean that your connection is less secure{end}.
-
-Linux/Unix
-In your terminal run:
-    {bash}export NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE='TRUE'{end}
-
-Windows
-In your CMD run:
-    {bash}set NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE='TRUE'{end}
-
-Jupyter notebook
-In your code cell:
-    {bash}%env NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE='TRUE'{end}
-
-You may also want to check the following docs page:
-    - https://docs.neptune.ai/api/environment_variables/#neptune_allow_self_signed_certificate
-
-
-{correct}Need help?{end}-> https://docs.neptune.ai/getting_help
-"""  # noqa: E501
-        super().__init__(message.format(**STYLES))
-
-
-class FileNotFound(NeptuneException):
-    def __init__(self, path: str) -> None:
-        super(FileNotFound, self).__init__("File {} doesn't exist.".format(path))
-
-
-class InvalidNotebookPath(NeptuneException):
-    def __init__(self, path: str) -> None:
-        super(InvalidNotebookPath, self).__init__(
-            "File {} is not a valid notebook. Should end with .ipynb.".format(path)
-        )
-
-
-class NeptuneIncorrectProjectQualifiedNameException(NeptuneException):
-    def __init__(self, project_qualified_name: str) -> None:
-        message = """
-{h1}
-----NeptuneIncorrectProjectQualifiedNameException-----------------------------------------------------------------------
-{end}
-The project qualified name {fail}"{project_qualified_name}"{end} you specified is incorrect.
-
-The correct project qualified name should look like this: {correct}WORKSPACE/PROJECT_NAME{end}.
-It has two parts:
-    - {correct}WORKSPACE{end}: which can be your username or your organization name
-    - {correct}PROJECT_NAME{end}: which is the actual project name you chose
-
-For example, the parts of the project {correct}neptune-ai/credit-default-prediction{end} are:
-    - {correct}neptune-ai{end}: the {underline}WORKSPACE{end}, in this case our company's organization name
-    - {correct}credit-default-prediction{end}: the {underline}PROJECT_NAME{end}
-
-The URL of this project is: https://app.neptune.ai/neptune-ai/credit-default-prediction
-
-You may also want to check the following docs pages:
-    - https://docs-legacy.neptune.ai/workspace-project-and-user-management/index.html
-    - https://docs-legacy.neptune.ai/getting-started/quick-starts/log_first_experiment.html
-
-{correct}Need help?{end}-> https://docs-legacy.neptune.ai/getting-started/getting-help.html
-"""
-        super(NeptuneIncorrectProjectQualifiedNameException, self).__init__(
-            message.format(project_qualified_name=project_qualified_name, **STYLES)
-        )
-
-
-class NeptuneMissingProjectQualifiedNameException(NeptuneException):
-    def __init__(self) -> None:
-        message = """
-{h1}
-----NeptuneMissingProjectQualifiedNameException-------------------------------------------------------------------------
-{end}
-The Neptune client couldn't find your project name.
-
-There are two options to add it:
-    - specify it in your code
-    - set an environment variable in your operating system.
- -{h2}CODE{end} -Pass it to {bold}neptune.init(){end} via {bold}project_qualified_name{end} argument: - {python}neptune.init(project_qualified_name='WORKSPACE_NAME/PROJECT_NAME', api_token='YOUR_API_TOKEN'){end} - -{h2}ENVIRONMENT VARIABLE{end} -or export or set an environment variable depending on your operating system: - - {correct}Linux/Unix{end} - In your terminal run: - {bash}export {env_project}=WORKSPACE_NAME/PROJECT_NAME{end} - - {correct}Windows{end} - In your CMD run: - {bash}set {env_project}=WORKSPACE_NAME/PROJECT_NAME{end} - -and skip the {bold}project_qualified_name{end} argument of {bold}neptune.init(){end}: - {python}neptune.init(api_token='YOUR_API_TOKEN'){end} - -You may also want to check the following docs pages: - - https://docs-legacy.neptune.ai/workspace-project-and-user-management/index.html - - https://docs-legacy.neptune.ai/getting-started/quick-starts/log_first_experiment.html - -{correct}Need help?{end}-> https://docs-legacy.neptune.ai/getting-started/getting-help.html -""" - super(NeptuneMissingProjectQualifiedNameException, self).__init__( - message.format(env_project=PROJECT_ENV_NAME, **STYLES) - ) - - -class NotAFile(NeptuneException): - def __init__(self, path: str) -> None: - super(NotAFile, self).__init__("Path {} is not a file.".format(path)) - - -class NotADirectory(NeptuneException): - def __init__(self, path: str) -> None: - super(NotADirectory, self).__init__("Path {} is not a directory.".format(path)) - - -class WritingToArchivedProjectException(NeptuneException): - def __init__(self) -> None: - message = """ -{h1} -----WritingToArchivedProjectException----------------------------------------------------------------------- -{end} -You're trying to write to a project that was archived. - -Set the project as active again or use mode="read-only" at initialization to fetch metadata from it. - -{correct}Need help?{end}-> https://docs.neptune.ai/help/error_writing_to_archived_project/ -""" - super(WritingToArchivedProjectException, self).__init__(message.format(**STYLES)) diff --git a/src/neptune/internal/extensions.py b/src/neptune/internal/extensions.py deleted file mode 100644 index 592886d1c..000000000 --- a/src/neptune/internal/extensions.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
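# A minimal sketch (not library code) of how the exception templates in the
# exceptions.py diff above render: each message is a str.format() template,
# and placeholders such as {h1}, {fail}, and {end} are filled from the
# platform-specific STYLES dict defined at the top of that file. The status
# value here is hypothetical.
template = "{h1}----SomeError----{end}\nThe server returned the status {fail}{status}{end}.\n"
print(template.format(status=502, **STYLES))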
-# -__all__ = ["load_extensions"] - -import sys -from typing import ( - Callable, - List, - Tuple, -) - -if sys.version_info >= (3, 8): - from importlib.metadata import entry_points -else: - from importlib_metadata import entry_points - -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) - - -def get_entry_points(name: str) -> List[Tuple[str, Callable[[], None]]]: - if (3, 8) <= sys.version_info < (3, 10): - return [(entry_point.name, entry_point.load()) for entry_point in entry_points().get(name, tuple())] - return [ - (entry_point.name, entry_point.load()) # type: ignore[unused-ignore, attr-defined] - for entry_point in entry_points(group=name) # type: ignore[unused-ignore, call-arg] - ] - - -def load_extensions() -> None: - for entry_point_name, loaded_extension in get_entry_points(name="neptune.extensions"): - try: - _ = loaded_extension() - except Exception as e: - warn_once( - message=f"Failed to load neptune extension `{entry_point_name}` with exception: {e}", - exception=NeptuneWarning, - ) diff --git a/src/neptune/internal/hardware/__init__.py b/src/neptune/internal/hardware/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/hardware/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/cgroup/__init__.py b/src/neptune/internal/hardware/cgroup/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/cgroup/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/cgroup/cgroup_filesystem_reader.py b/src/neptune/internal/hardware/cgroup/cgroup_filesystem_reader.py deleted file mode 100644 index c8f4020d4..000000000 --- a/src/neptune/internal/hardware/cgroup/cgroup_filesystem_reader.py +++ /dev/null @@ -1,69 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -import os -import re - - -class CGroupFilesystemReader(object): - def __init__(self): - cgroup_memory_dir = self.__cgroup_mount_dir(subsystem="memory") - self.__memory_usage_file = os.path.join(cgroup_memory_dir, "memory.usage_in_bytes") - self.__memory_limit_file = os.path.join(cgroup_memory_dir, "memory.limit_in_bytes") - - cgroup_cpu_dir = self.__cgroup_mount_dir(subsystem="cpu") - self.__cpu_period_file = os.path.join(cgroup_cpu_dir, "cpu.cfs_period_us") - self.__cpu_quota_file = os.path.join(cgroup_cpu_dir, "cpu.cfs_quota_us") - - cgroup_cpuacct_dir = self.__cgroup_mount_dir(subsystem="cpuacct") - self.__cpuacct_usage_file = os.path.join(cgroup_cpuacct_dir, "cpuacct.usage") - - def get_memory_usage_in_bytes(self): - return self.__read_int_file(self.__memory_usage_file) - - def get_memory_limit_in_bytes(self): - return self.__read_int_file(self.__memory_limit_file) - - def get_cpu_quota_micros(self): - return self.__read_int_file(self.__cpu_quota_file) - - def get_cpu_period_micros(self): - return self.__read_int_file(self.__cpu_period_file) - - def get_cpuacct_usage_nanos(self): - return self.__read_int_file(self.__cpuacct_usage_file) - - def __read_int_file(self, filename): - with open(filename) as f: - return int(f.read()) - - def __cgroup_mount_dir(self, subsystem): - """ - :param subsystem: cgroup subsystem like memory, cpu - :return: directory where given subsystem is mounted - """ - with open("/proc/mounts", "r") as f: - for line in f.readlines(): - split_line = re.split(r"\s+", line) - mount_dir = split_line[1] - - if "cgroup" in mount_dir: - dirname = mount_dir.split("/")[-1] - subsystems = dirname.split(",") - if subsystem in subsystems: - return mount_dir - - assert False, 'Mount directory for "{}" subsystem not found'.format(subsystem) diff --git a/src/neptune/internal/hardware/cgroup/cgroup_monitor.py b/src/neptune/internal/hardware/cgroup/cgroup_monitor.py deleted file mode 100644 index 07a282ffc..000000000 --- a/src/neptune/internal/hardware/cgroup/cgroup_monitor.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
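# A hedged usage sketch of CGroupFilesystemReader, assuming a Linux host with
# cgroup v1 mounts (the class reads the memory.*, cpu.cfs_*, and cpuacct.usage
# control files; it does not know about the unified cgroup v2 hierarchy):
reader = CGroupFilesystemReader()
quota_micros = reader.get_cpu_quota_micros()  # -1 means "no CPU limit set"
cores = quota_micros / float(reader.get_cpu_period_micros()) if quota_micros != -1 else None
print(reader.get_memory_limit_in_bytes(), cores)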
-# - -import time - -from neptune.internal.hardware.cgroup.cgroup_filesystem_reader import CGroupFilesystemReader -from neptune.internal.hardware.system.system_monitor import SystemMonitor - - -class CGroupMonitor(object): - def __init__(self, cgroup_filesystem_reader, system_monitor): - self.__cgroup_filesystem_reader = cgroup_filesystem_reader - self.__system_monitor = system_monitor - - self.__last_cpu_usage_measurement_timestamp_nanos = None - self.__last_cpu_cumulative_usage_nanos = None - - @staticmethod - def create(): - return CGroupMonitor(CGroupFilesystemReader(), SystemMonitor()) - - def get_memory_usage_in_bytes(self): - return self.__cgroup_filesystem_reader.get_memory_usage_in_bytes() - - def get_memory_limit_in_bytes(self): - cgroup_mem_limit = self.__cgroup_filesystem_reader.get_memory_limit_in_bytes() - total_virtual_memory = self.__system_monitor.virtual_memory().total - return min(cgroup_mem_limit, total_virtual_memory) - - def get_cpu_usage_limit_in_cores(self): - cpu_quota_micros = self.__cgroup_filesystem_reader.get_cpu_quota_micros() - - if cpu_quota_micros == -1: - return float(self.__system_monitor.cpu_count()) - else: - cpu_period_micros = self.__cgroup_filesystem_reader.get_cpu_period_micros() - return float(cpu_quota_micros) / float(cpu_period_micros) - - def get_cpu_usage_percentage(self): - current_timestamp_nanos = time.time() * 10**9 - cpu_cumulative_usage_nanos = self.__cgroup_filesystem_reader.get_cpuacct_usage_nanos() - - if self.__first_measurement(): - current_usage = 0.0 - else: - usage_diff = cpu_cumulative_usage_nanos - self.__last_cpu_cumulative_usage_nanos - time_diff = current_timestamp_nanos - self.__last_cpu_usage_measurement_timestamp_nanos - current_usage = float(usage_diff) / float(time_diff) / self.get_cpu_usage_limit_in_cores() * 100.0 - - self.__last_cpu_usage_measurement_timestamp_nanos = current_timestamp_nanos - self.__last_cpu_cumulative_usage_nanos = cpu_cumulative_usage_nanos - - # cgroup cpu usage may slightly exceed the given limit, but we don't want to show it - return self.__clamp(current_usage, lower_limit=0.0, upper_limit=100.0) - - def __first_measurement(self): - return ( - self.__last_cpu_usage_measurement_timestamp_nanos is None or self.__last_cpu_cumulative_usage_nanos is None - ) - - @staticmethod - def __clamp(value, lower_limit, upper_limit): - return max(lower_limit, min(value, upper_limit)) diff --git a/src/neptune/internal/hardware/constants.py b/src/neptune/internal/hardware/constants.py deleted file mode 100644 index 1530fe3a7..000000000 --- a/src/neptune/internal/hardware/constants.py +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
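# A worked example, with hypothetical numbers, of the rate computation inside
# get_cpu_usage_percentage() above: the cumulative cpuacct counter is
# differenced, divided by the elapsed wall-clock time, and normalized by the
# core limit.
t0_ns, usage0_ns = 1_000_000_000.0, 4_000_000_000  # first sample
t1_ns, usage1_ns = 3_000_000_000.0, 5_000_000_000  # two seconds later
cores = 2.0                                        # cfs_quota_us / cfs_period_us

percent = (usage1_ns - usage0_ns) / (t1_ns - t0_ns) / cores * 100.0
assert percent == 25.0  # half a core busy against a two-core limit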
-# -__all__ = ["BYTES_IN_ONE_MB", "BYTES_IN_ONE_GB"] - -BYTES_IN_ONE_MB = 2**20 -BYTES_IN_ONE_GB = 2**30 diff --git a/src/neptune/internal/hardware/gauges/__init__.py b/src/neptune/internal/hardware/gauges/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/gauges/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/gauges/cpu.py b/src/neptune/internal/hardware/gauges/cpu.py deleted file mode 100644 index 50897409a..000000000 --- a/src/neptune/internal/hardware/gauges/cpu.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from neptune.internal.hardware.cgroup.cgroup_monitor import CGroupMonitor -from neptune.internal.hardware.gauges.gauge import Gauge -from neptune.internal.hardware.system.system_monitor import SystemMonitor - - -class SystemCpuUsageGauge(Gauge): - def __init__(self): - self.__system_monitor = SystemMonitor() - - def name(self): - return "cpu" - - def value(self): - return self.__system_monitor.cpu_percent() - - def __eq__(self, other): - return self.__class__ == other.__class__ - - def __repr__(self): - return str("SystemCpuUsageGauge") - - -class CGroupCpuUsageGauge(Gauge): - def __init__(self): - self.__cgroup_monitor = CGroupMonitor.create() - - def name(self): - return "cpu" - - def value(self): - return self.__cgroup_monitor.get_cpu_usage_percentage() - - def __eq__(self, other): - return self.__class__ == other.__class__ - - def __repr__(self): - return str("CGroupCpuUsageGauge") diff --git a/src/neptune/internal/hardware/gauges/gauge.py b/src/neptune/internal/hardware/gauges/gauge.py deleted file mode 100644 index 32b070b89..000000000 --- a/src/neptune/internal/hardware/gauges/gauge.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from abc import ( - ABCMeta, - abstractmethod, -) - - -class Gauge(object): - __metaclass__ = ABCMeta - - @abstractmethod - def name(self): - """ - :return: Gauge name (str). - """ - raise NotImplementedError() - - @abstractmethod - def value(self): - """ - :return: Current value (float). - """ - raise NotImplementedError() diff --git a/src/neptune/internal/hardware/gauges/gauge_factory.py b/src/neptune/internal/hardware/gauges/gauge_factory.py deleted file mode 100644 index fd9b4e53a..000000000 --- a/src/neptune/internal/hardware/gauges/gauge_factory.py +++ /dev/null @@ -1,60 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.hardware.gauges.cpu import ( - CGroupCpuUsageGauge, - SystemCpuUsageGauge, -) -from neptune.internal.hardware.gauges.gauge_mode import GaugeMode -from neptune.internal.hardware.gauges.gpu import ( - GpuMemoryGauge, - GpuUsageGauge, -) -from neptune.internal.hardware.gauges.memory import ( - CGroupMemoryUsageGauge, - SystemMemoryUsageGauge, -) - - -class GaugeFactory(object): - def __init__(self, gauge_mode): - self.__gauge_mode = gauge_mode - - def create_cpu_usage_gauge(self): - if self.__gauge_mode == GaugeMode.SYSTEM: - return SystemCpuUsageGauge() - elif self.__gauge_mode == GaugeMode.CGROUP: - return CGroupCpuUsageGauge() - else: - raise self.__invalid_gauge_mode_exception() - - def create_memory_usage_gauge(self): - if self.__gauge_mode == GaugeMode.SYSTEM: - return SystemMemoryUsageGauge() - elif self.__gauge_mode == GaugeMode.CGROUP: - return CGroupMemoryUsageGauge() - else: - raise self.__invalid_gauge_mode_exception() - - @staticmethod - def create_gpu_usage_gauge(card_index): - return GpuUsageGauge(card_index=card_index) - - @staticmethod - def create_gpu_memory_gauge(card_index): - return GpuMemoryGauge(card_index=card_index) - - def __invalid_gauge_mode_exception(self): - return ValueError(str("Invalid gauge mode: {}".format(self.__gauge_mode))) diff --git a/src/neptune/internal/hardware/gauges/gauge_mode.py b/src/neptune/internal/hardware/gauges/gauge_mode.py deleted file mode 100644 index ce0a02b16..000000000 --- a/src/neptune/internal/hardware/gauges/gauge_mode.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
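# Illustrative only: the smallest Gauge implementation satisfying the abstract
# interface above (name() returning a str, value() returning a float).
# ConstantGauge is hypothetical and not part of the library.
class ConstantGauge(Gauge):
    def name(self):
        return "constant"

    def value(self):
        return 42.0

assert ConstantGauge().value() == 42.0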
-# - - -class GaugeMode(object): - SYSTEM = "system" - CGROUP = "cgroup" diff --git a/src/neptune/internal/hardware/gauges/gpu.py b/src/neptune/internal/hardware/gauges/gpu.py deleted file mode 100644 index 057c959f4..000000000 --- a/src/neptune/internal/hardware/gauges/gpu.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.hardware.constants import BYTES_IN_ONE_GB -from neptune.internal.hardware.gauges.gauge import Gauge -from neptune.internal.hardware.gpu.gpu_monitor import GPUMonitor - - -class GpuUsageGauge(Gauge): - def __init__(self, card_index): - self.card_index = card_index - self.__gpu_monitor = GPUMonitor() - - def name(self): - return str(self.card_index) - - def value(self): - return self.__gpu_monitor.get_card_usage_percent(self.card_index) - - def __eq__(self, other): - return self.__class__ == other.__class__ and self.card_index == other.card_index - - def __repr__(self): - return str("GpuUsageGauge") - - -class GpuMemoryGauge(Gauge): - def __init__(self, card_index): - self.card_index = card_index - self.__gpu_monitor = GPUMonitor() - - def name(self): - return str(self.card_index) - - def value(self): - return self.__gpu_monitor.get_card_used_memory_in_bytes(self.card_index) / float(BYTES_IN_ONE_GB) - - def __eq__(self, other): - return self.__class__ == other.__class__ and self.card_index == other.card_index - - def __repr__(self): - return str("GpuMemoryGauge") diff --git a/src/neptune/internal/hardware/gauges/memory.py b/src/neptune/internal/hardware/gauges/memory.py deleted file mode 100644 index 195936e62..000000000 --- a/src/neptune/internal/hardware/gauges/memory.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from neptune.internal.hardware.cgroup.cgroup_monitor import CGroupMonitor -from neptune.internal.hardware.constants import BYTES_IN_ONE_GB -from neptune.internal.hardware.gauges.gauge import Gauge -from neptune.internal.hardware.system.system_monitor import SystemMonitor - - -class SystemMemoryUsageGauge(Gauge): - def __init__(self): - self.__system_monitor = SystemMonitor() - - def name(self): - return "ram" - - def value(self): - virtual_mem = self.__system_monitor.virtual_memory() - return (virtual_mem.total - virtual_mem.available) / float(BYTES_IN_ONE_GB) - - def __eq__(self, other): - return self.__class__ == other.__class__ - - def __repr__(self): - return str("SystemMemoryUsageGauge") - - -class CGroupMemoryUsageGauge(Gauge): - def __init__(self): - self.__cgroup_monitor = CGroupMonitor.create() - - def name(self): - return "ram" - - def value(self): - return self.__cgroup_monitor.get_memory_usage_in_bytes() / float(BYTES_IN_ONE_GB) - - def __eq__(self, other): - return self.__class__ == other.__class__ - - def __repr__(self): - return str("CGroupMemoryUsageGauge") diff --git a/src/neptune/internal/hardware/gpu/__init__.py b/src/neptune/internal/hardware/gpu/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/hardware/gpu/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/gpu/gpu_monitor.py b/src/neptune/internal/hardware/gpu/gpu_monitor.py deleted file mode 100644 index ab683b19e..000000000 --- a/src/neptune/internal/hardware/gpu/gpu_monitor.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["GPUMonitor"] - -from neptune.internal.utils.logger import get_logger -from neptune.vendor.pynvml import ( - NVMLError, - nvmlDeviceGetCount, - nvmlDeviceGetHandleByIndex, - nvmlDeviceGetMemoryInfo, - nvmlDeviceGetUtilizationRates, - nvmlInit, -) - -_logger = get_logger() - - -class GPUMonitor(object): - - nvml_error_printed = False - - def get_card_count(self): - return self.__nvml_get_or_else(nvmlDeviceGetCount, default=0) - - def get_card_usage_percent(self, card_index): - return self.__nvml_get_or_else( - lambda: float(nvmlDeviceGetUtilizationRates(nvmlDeviceGetHandleByIndex(card_index)).gpu) - ) - - def get_card_used_memory_in_bytes(self, card_index): - return self.__nvml_get_or_else(lambda: nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(card_index)).used) - - def get_top_card_memory_in_bytes(self): - def read_top_card_memory_in_bytes(): - return self.__nvml_get_or_else( - lambda: [ - nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(card_index)).total - for card_index in range(nvmlDeviceGetCount()) - ], - default=0, - ) - - memory_per_card = read_top_card_memory_in_bytes() - if not memory_per_card: - return 0 - return max(memory_per_card) - - def __nvml_get_or_else(self, getter, default=None): - try: - nvmlInit() - return getter() - except NVMLError as e: - if not GPUMonitor.nvml_error_printed: - warning = ( - "Info (NVML): %s. GPU usage metrics may not be reported. For more information, " - "see https://docs.neptune.ai/help/nvml_error/" - ) - _logger.warning(warning, e) - GPUMonitor.nvml_error_printed = True - return default diff --git a/src/neptune/internal/hardware/hardware_metric_reporting_job.py b/src/neptune/internal/hardware/hardware_metric_reporting_job.py deleted file mode 100644 index bb6773e03..000000000 --- a/src/neptune/internal/hardware/hardware_metric_reporting_job.py +++ /dev/null @@ -1,122 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["HardwareMetricReportingJob"] - -import os -import time -from itertools import groupby -from typing import ( - TYPE_CHECKING, - Dict, - Optional, -) - -from neptune.internal.background_job import BackgroundJob -from neptune.internal.hardware.gauges.gauge_factory import GaugeFactory -from neptune.internal.hardware.gauges.gauge_mode import GaugeMode -from neptune.internal.hardware.gpu.gpu_monitor import GPUMonitor -from neptune.internal.hardware.metrics.metrics_factory import MetricsFactory -from neptune.internal.hardware.metrics.reports.metric_reporter import MetricReporter -from neptune.internal.hardware.metrics.reports.metric_reporter_factory import MetricReporterFactory -from neptune.internal.hardware.resources.system_resource_info_factory import SystemResourceInfoFactory -from neptune.internal.hardware.system.system_monitor import SystemMonitor -from neptune.internal.threading.daemon import Daemon -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.utils import in_docker -from neptune.types.series import FloatSeries - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - -_logger = get_logger() - - -class HardwareMetricReportingJob(BackgroundJob): - def __init__(self, period: float = 10, attribute_namespace: str = "monitoring"): - self._period = period - self._thread = None - self._started = False - self._gauges_in_resource: Dict[str, int] = dict() - self._attribute_namespace = attribute_namespace - - def start(self, container: "NeptuneObject"): - gauge_mode = GaugeMode.CGROUP if in_docker() else GaugeMode.SYSTEM - system_resource_info = SystemResourceInfoFactory( - system_monitor=SystemMonitor(), - gpu_monitor=GPUMonitor(), - os_environ=os.environ, - ).create(gauge_mode=gauge_mode) - gauge_factory = GaugeFactory(gauge_mode=gauge_mode) - metrics_factory = MetricsFactory(gauge_factory=gauge_factory, system_resource_info=system_resource_info) - metrics_container = metrics_factory.create_metrics_container() - metric_reporter = MetricReporterFactory(time.time()).create(metrics=metrics_container.metrics()) - - for metric in metrics_container.metrics(): - self._gauges_in_resource[metric.resource_type] = len(metric.gauges) - - for metric in metrics_container.metrics(): - for gauge in metric.gauges: - path = self.get_attribute_name(metric.resource_type, gauge.name()) - if not container.get_attribute(path): - container[path] = FloatSeries([], min=metric.min_value, max=metric.max_value, unit=metric.unit) - - self._thread = self.ReportingThread(self, self._period, container, metric_reporter) - self._thread.start() - self._started = True - - def stop(self): - if not self._started: - return - self._thread.interrupt() - - def pause(self): - self._thread.pause() - - def resume(self): - self._thread.resume() - - def join(self, seconds: Optional[float] = None): - if not self._started: - return - self._thread.join(seconds) - - def get_attribute_name(self, resource_type, gauge_name) -> str: - gauges_count = self._gauges_in_resource.get(resource_type, None) - if gauges_count is None or gauges_count != 1: - return "{}/{}_{}".format(self._attribute_namespace, resource_type, gauge_name).lower() - return "{}/{}".format(self._attribute_namespace, resource_type).lower() - - class ReportingThread(Daemon): - def __init__( - self, - outer: "HardwareMetricReportingJob", - period: float, - container: "NeptuneObject", - metric_reporter: MetricReporter, - ): - super().__init__(sleep_time=period, name="NeptuneReporting") - self._outer = outer - self._container = 
container - self._metric_reporter = metric_reporter - - def work(self) -> None: - metric_reports = self._metric_reporter.report(time.time()) - for report in metric_reports: - for gauge_name, metric_values in groupby(report.values, lambda value: value.gauge_name): - attr = self._container[self._outer.get_attribute_name(report.metric.resource_type, gauge_name)] - # TODO: Avoid loop - for metric_value in metric_values: - attr.log(value=metric_value.value, timestamp=metric_value.timestamp) diff --git a/src/neptune/internal/hardware/metrics/__init__.py b/src/neptune/internal/hardware/metrics/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/metrics/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/metrics/metric.py b/src/neptune/internal/hardware/metrics/metric.py deleted file mode 100644 index b54ed808a..000000000 --- a/src/neptune/internal/hardware/metrics/metric.py +++ /dev/null @@ -1,99 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
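# A standalone restatement (hypothetical helper, not library code) of the
# naming rule in get_attribute_name() above: a resource backed by a single
# gauge gets one flat path, while multi-gauge resources get per-gauge suffixes.
def attribute_name(namespace, resource_type, gauge_name, gauges_in_resource):
    if gauges_in_resource == 1:
        return "{}/{}".format(namespace, resource_type).lower()
    return "{}/{}_{}".format(namespace, resource_type, gauge_name).lower()

assert attribute_name("monitoring", "CPU", "cpu", 1) == "monitoring/cpu"
assert attribute_name("monitoring", "GPU", "0", 2) == "monitoring/gpu_0"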
-# - - -class Metric(object): - def __init__( - self, - name, - description, - resource_type, - unit, - min_value, - max_value, - gauges, - internal_id=None, - ): - self.__internal_id = internal_id - self.__name = name - self.__description = description - self.__resource_type = resource_type - self.__unit = unit - self.__min_value = min_value - self.__max_value = max_value - self.__gauges = gauges - - @property - def internal_id(self): - return self.__internal_id - - @internal_id.setter - def internal_id(self, value): - self.__internal_id = value - - @property - def name(self): - return self.__name - - @property - def description(self): - return self.__description - - @property - def resource_type(self): - return self.__resource_type - - @property - def unit(self): - return self.__unit - - @property - def min_value(self): - return self.__min_value - - @property - def max_value(self): - return self.__max_value - - @property - def gauges(self): - return self.__gauges - - def __repr__(self): - return ( - "Metric(internal_id={}, name={}, description={}, resource_type={}, unit={}, min_value={}, " - "max_value={}, gauges={})" - ).format( - self.internal_id, - self.name, - self.description, - self.resource_type, - self.unit, - self.min_value, - self.max_value, - self.gauges, - ) - - def __eq__(self, other): - return self.__class__ == other.__class__ and repr(self) == repr(other) - - -class MetricResourceType(object): - CPU = "CPU" - RAM = "MEMORY" - GPU = "GPU" - GPU_RAM = "GPU_MEMORY" - OTHER = "OTHER" diff --git a/src/neptune/internal/hardware/metrics/metrics_container.py b/src/neptune/internal/hardware/metrics/metrics_container.py deleted file mode 100644 index f7d90a9b2..000000000 --- a/src/neptune/internal/hardware/metrics/metrics_container.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -class MetricsContainer(object): - def __init__(self, cpu_usage_metric, memory_metric, gpu_usage_metric, gpu_memory_metric): - self.cpu_usage_metric = cpu_usage_metric - self.memory_metric = memory_metric - self.gpu_usage_metric = gpu_usage_metric - self.gpu_memory_metric = gpu_memory_metric - - def metrics(self): - return [ - metric - for metric in [ - self.cpu_usage_metric, - self.memory_metric, - self.gpu_usage_metric, - self.gpu_memory_metric, - ] - if metric is not None - ] diff --git a/src/neptune/internal/hardware/metrics/metrics_factory.py b/src/neptune/internal/hardware/metrics/metrics_factory.py deleted file mode 100644 index 666d6b23e..000000000 --- a/src/neptune/internal/hardware/metrics/metrics_factory.py +++ /dev/null @@ -1,92 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.hardware.constants import BYTES_IN_ONE_GB -from neptune.internal.hardware.metrics.metric import ( - Metric, - MetricResourceType, -) -from neptune.internal.hardware.metrics.metrics_container import MetricsContainer - - -class MetricsFactory(object): - def __init__(self, gauge_factory, system_resource_info): - self.__gauge_factory = gauge_factory - self.__system_resource_info = system_resource_info - - def create_metrics_container(self): - cpu_usage_metric = self.__create_cpu_usage_metric() - memory_metric = self.__create_memory_metric() - - has_gpu = self.__system_resource_info.has_gpu() - gpu_usage_metric = self.__create_gpu_usage_metric() if has_gpu else None - gpu_memory_metric = self.__create_gpu_memory_metric() if has_gpu else None - - return MetricsContainer( - cpu_usage_metric=cpu_usage_metric, - memory_metric=memory_metric, - gpu_usage_metric=gpu_usage_metric, - gpu_memory_metric=gpu_memory_metric, - ) - - def __create_cpu_usage_metric(self): - return Metric( - name="CPU - usage", - description="average of all cores", - resource_type=MetricResourceType.CPU, - unit="%", - min_value=0.0, - max_value=100.0, - gauges=[self.__gauge_factory.create_cpu_usage_gauge()], - ) - - def __create_memory_metric(self): - return Metric( - name="RAM", - description="", - resource_type=MetricResourceType.RAM, - unit="GB", - min_value=0.0, - max_value=self.__system_resource_info.memory_amount_bytes / float(BYTES_IN_ONE_GB), - gauges=[self.__gauge_factory.create_memory_usage_gauge()], - ) - - def __create_gpu_usage_metric(self): - return Metric( - name="GPU - usage", - description="{} cards".format(self.__system_resource_info.gpu_card_count), - resource_type=MetricResourceType.GPU, - unit="%", - min_value=0.0, - max_value=100.0, - gauges=[ - self.__gauge_factory.create_gpu_usage_gauge(card_index=card_index) - for card_index in self.__system_resource_info.gpu_card_indices - ], - ) - - def __create_gpu_memory_metric(self): - return Metric( - name="GPU - memory", - description="{} cards".format(self.__system_resource_info.gpu_card_count), - resource_type=MetricResourceType.GPU_RAM, - unit="GB", - min_value=0.0, - max_value=self.__system_resource_info.gpu_memory_amount_bytes / float(BYTES_IN_ONE_GB), - gauges=[ - self.__gauge_factory.create_gpu_memory_gauge(card_index=card_index) - for card_index in self.__system_resource_info.gpu_card_indices - ], - ) diff --git a/src/neptune/internal/hardware/metrics/reports/__init__.py b/src/neptune/internal/hardware/metrics/reports/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/metrics/reports/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
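# A hedged wiring sketch of the factory chain above, mirroring what
# HardwareMetricReportingJob.start() does earlier in this patch.
# GaugeMode.SYSTEM is used so no cgroup files need to exist on the host.
import os

resource_info = SystemResourceInfoFactory(
    system_monitor=SystemMonitor(),
    gpu_monitor=GPUMonitor(),
    os_environ=os.environ,
).create(gauge_mode=GaugeMode.SYSTEM)
metrics = MetricsFactory(
    gauge_factory=GaugeFactory(gauge_mode=GaugeMode.SYSTEM),
    system_resource_info=resource_info,
).create_metrics_container().metrics()
print([metric.name for metric in metrics])  # e.g. CPU - usage, RAM, and GPU metrics if present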
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/metrics/reports/metric_report.py b/src/neptune/internal/hardware/metrics/reports/metric_report.py deleted file mode 100644 index c028e0e67..000000000 --- a/src/neptune/internal/hardware/metrics/reports/metric_report.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from collections import namedtuple - -MetricReport = namedtuple("MetricReport", ["metric", "values"]) - -MetricValue = namedtuple("MetricValue", ["timestamp", "running_time", "gauge_name", "value"]) diff --git a/src/neptune/internal/hardware/metrics/reports/metric_reporter.py b/src/neptune/internal/hardware/metrics/reports/metric_reporter.py deleted file mode 100644 index 8dee14409..000000000 --- a/src/neptune/internal/hardware/metrics/reports/metric_reporter.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.hardware.metrics.reports.metric_report import ( - MetricReport, - MetricValue, -) - - -class MetricReporter(object): - def __init__(self, metrics, reference_timestamp): - self.__metrics = metrics - self.__reference_timestamp = reference_timestamp - - def report(self, timestamp): - """ - :param timestamp: Time of measurement (float, seconds since Epoch). 
-        :return: list[MetricReport]
-        """
-        return [
-            MetricReport(
-                metric=metric,
-                values=[x for x in [self.__metric_value_for_gauge(gauge, timestamp) for gauge in metric.gauges] if x is not None],
-            )
-            for metric in self.__metrics
-        ]
-
-    def __metric_value_for_gauge(self, gauge, timestamp):
-        value = gauge.value()
-        return (
-            MetricValue(
-                timestamp=timestamp,
-                running_time=timestamp - self.__reference_timestamp,
-                gauge_name=gauge.name(),
-                value=value,
-            )
-            if value is not None
-            else None
-        )
diff --git a/src/neptune/internal/hardware/metrics/reports/metric_reporter_factory.py b/src/neptune/internal/hardware/metrics/reports/metric_reporter_factory.py
deleted file mode 100644
index ab1d88576..000000000
--- a/src/neptune/internal/hardware/metrics/reports/metric_reporter_factory.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Copyright (c) 2019, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from neptune.internal.hardware.metrics.reports.metric_reporter import MetricReporter
-
-
-class MetricReporterFactory(object):
-    def __init__(self, reference_timestamp):
-        self.__reference_timestamp = reference_timestamp
-
-    def create(self, metrics):
-        return MetricReporter(metrics=metrics, reference_timestamp=self.__reference_timestamp)
diff --git a/src/neptune/internal/hardware/metrics/service/__init__.py b/src/neptune/internal/hardware/metrics/service/__init__.py
deleted file mode 100644
index 62a86a5be..000000000
--- a/src/neptune/internal/hardware/metrics/service/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2019, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/src/neptune/internal/hardware/metrics/service/metric_service.py b/src/neptune/internal/hardware/metrics/service/metric_service.py
deleted file mode 100644
index dda6cdd08..000000000
--- a/src/neptune/internal/hardware/metrics/service/metric_service.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (c) 2019, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
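# An illustrative instance, with hypothetical values, of the report structures
# from metric_report.py above: report() yields one MetricReport per metric,
# each holding MetricValue entries tagged with the producing gauge's name.
report = MetricReport(
    metric="cpu-metric-placeholder",  # a real Metric instance in actual use
    values=[MetricValue(timestamp=1700000000.0, running_time=12.5, gauge_name="cpu", value=37.5)],
)
assert report.values[0].gauge_name == "cpu"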
-# - - -class MetricService(object): - def __init__(self, backend, metric_reporter, experiment, metrics_container): - self.__backend = backend - self.__metric_reporter = metric_reporter - self.experiment = experiment - self.metrics_container = metrics_container - - def report_and_send(self, timestamp): - metric_reports = self.__metric_reporter.report(timestamp) - self.__backend.send_hardware_metric_reports(self.experiment, self.metrics_container.metrics(), metric_reports) diff --git a/src/neptune/internal/hardware/metrics/service/metric_service_factory.py b/src/neptune/internal/hardware/metrics/service/metric_service_factory.py deleted file mode 100644 index 11ade5a18..000000000 --- a/src/neptune/internal/hardware/metrics/service/metric_service_factory.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.hardware.gauges.gauge_factory import GaugeFactory -from neptune.internal.hardware.gpu.gpu_monitor import GPUMonitor -from neptune.internal.hardware.metrics.metrics_factory import MetricsFactory -from neptune.internal.hardware.metrics.reports.metric_reporter_factory import MetricReporterFactory -from neptune.internal.hardware.metrics.service.metric_service import MetricService -from neptune.internal.hardware.resources.system_resource_info_factory import SystemResourceInfoFactory -from neptune.internal.hardware.system.system_monitor import SystemMonitor - - -class MetricServiceFactory(object): - def __init__(self, backend, os_environ): - self.__backend = backend - self.__os_environ = os_environ - - def create(self, gauge_mode, experiment, reference_timestamp): - system_resource_info = SystemResourceInfoFactory( - system_monitor=SystemMonitor(), - gpu_monitor=GPUMonitor(), - os_environ=self.__os_environ, - ).create(gauge_mode=gauge_mode) - - gauge_factory = GaugeFactory(gauge_mode=gauge_mode) - metrics_factory = MetricsFactory(gauge_factory=gauge_factory, system_resource_info=system_resource_info) - metrics_container = metrics_factory.create_metrics_container() - - for metric in metrics_container.metrics(): - metric.internal_id = self.__backend.create_hardware_metric(experiment, metric) - - metric_reporter = MetricReporterFactory(reference_timestamp).create(metrics=metrics_container.metrics()) - - return MetricService( - backend=self.__backend, - metric_reporter=metric_reporter, - experiment=experiment, - metrics_container=metrics_container, - ) diff --git a/src/neptune/internal/hardware/resources/__init__.py b/src/neptune/internal/hardware/resources/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/resources/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/resources/gpu_card_indices_provider.py b/src/neptune/internal/hardware/resources/gpu_card_indices_provider.py deleted file mode 100644 index 4c1d73ea8..000000000 --- a/src/neptune/internal/hardware/resources/gpu_card_indices_provider.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import re - - -class GPUCardIndicesProvider(object): - def __init__(self, cuda_visible_devices, gpu_card_count): - self.__cuda_visible_devices = cuda_visible_devices - self.__gpu_card_count = gpu_card_count - self.__cuda_visible_devices_regex = r"^-?\d+(,-?\d+)*$" - - def get(self): - if self.__is_cuda_visible_devices_correct(): - return self.__gpu_card_indices_from_cuda_visible_devices() - else: - return list(range(self.__gpu_card_count)) - - def __is_cuda_visible_devices_correct(self): - return self.__cuda_visible_devices is not None and re.match( - self.__cuda_visible_devices_regex, self.__cuda_visible_devices - ) - - def __gpu_card_indices_from_cuda_visible_devices(self): - correct_indices = [] - - # According to CUDA Toolkit specification. - # https://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html#env-vars - for gpu_index_str in self.__cuda_visible_devices.split(","): - gpu_index = int(gpu_index_str) - if 0 <= gpu_index < self.__gpu_card_count: - correct_indices.append(gpu_index) - else: - break - - return list(set(correct_indices)) diff --git a/src/neptune/internal/hardware/resources/system_resource_info.py b/src/neptune/internal/hardware/resources/system_resource_info.py deleted file mode 100644 index 9c5cd0ba6..000000000 --- a/src/neptune/internal/hardware/resources/system_resource_info.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
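# Worked examples, for hypothetical setups, of GPUCardIndicesProvider.get():
# parsing stops at the first out-of-range index, per the CUDA Toolkit rule
# cited above, and the final set() leaves the result order unspecified.
assert sorted(GPUCardIndicesProvider("0,2", gpu_card_count=4).get()) == [0, 2]
assert GPUCardIndicesProvider("1,-1,2", gpu_card_count=4).get() == [1]  # stops at -1
assert GPUCardIndicesProvider(None, gpu_card_count=2).get() == [0, 1]   # falls back to all cards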
-# - - -class SystemResourceInfo(object): - def __init__( - self, - cpu_core_count, - memory_amount_bytes, - gpu_card_indices, - gpu_memory_amount_bytes, - ): - self.__cpu_core_count = cpu_core_count - self.__memory_amount_bytes = memory_amount_bytes - self.__gpu_card_indices = gpu_card_indices - self.__gpu_memory_amount_bytes = gpu_memory_amount_bytes - - @property - def cpu_core_count(self): - return self.__cpu_core_count - - @property - def memory_amount_bytes(self): - return self.__memory_amount_bytes - - @property - def gpu_card_count(self): - return len(self.__gpu_card_indices) - - @property - def gpu_card_indices(self): - return self.__gpu_card_indices - - @property - def gpu_memory_amount_bytes(self): - return self.__gpu_memory_amount_bytes - - def has_gpu(self): - return self.gpu_card_count > 0 - - def __repr__(self): - return str(self.__dict__) diff --git a/src/neptune/internal/hardware/resources/system_resource_info_factory.py b/src/neptune/internal/hardware/resources/system_resource_info_factory.py deleted file mode 100644 index b05b8bf56..000000000 --- a/src/neptune/internal/hardware/resources/system_resource_info_factory.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from neptune.internal.hardware.cgroup.cgroup_monitor import CGroupMonitor -from neptune.internal.hardware.gauges.gauge_mode import GaugeMode -from neptune.internal.hardware.resources.gpu_card_indices_provider import GPUCardIndicesProvider -from neptune.internal.hardware.resources.system_resource_info import SystemResourceInfo - - -class SystemResourceInfoFactory(object): - def __init__(self, system_monitor, gpu_monitor, os_environ): - self.__system_monitor = system_monitor - self.__gpu_monitor = gpu_monitor - self.__gpu_card_indices_provider = GPUCardIndicesProvider( - cuda_visible_devices=os_environ.get("CUDA_VISIBLE_DEVICES"), - gpu_card_count=self.__gpu_monitor.get_card_count(), - ) - - def create(self, gauge_mode): - if gauge_mode == GaugeMode.SYSTEM: - return self.__create_whole_system_resource_info() - elif gauge_mode == GaugeMode.CGROUP: - return self.__create_cgroup_resource_info() - else: - raise ValueError(str("Unknown gauge mode: {}".format(gauge_mode))) - - def __create_whole_system_resource_info(self): - return SystemResourceInfo( - cpu_core_count=float(self.__system_monitor.cpu_count()), - memory_amount_bytes=self.__system_monitor.virtual_memory().total, - gpu_card_indices=self.__gpu_card_indices_provider.get(), - gpu_memory_amount_bytes=self.__gpu_monitor.get_top_card_memory_in_bytes(), - ) - - def __create_cgroup_resource_info(self): - cgroup_monitor = CGroupMonitor.create() - - return SystemResourceInfo( - cpu_core_count=cgroup_monitor.get_cpu_usage_limit_in_cores(), - memory_amount_bytes=cgroup_monitor.get_memory_limit_in_bytes(), - gpu_card_indices=self.__gpu_card_indices_provider.get(), - gpu_memory_amount_bytes=self.__gpu_monitor.get_top_card_memory_in_bytes(), - ) diff --git a/src/neptune/internal/hardware/system/__init__.py b/src/neptune/internal/hardware/system/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/src/neptune/internal/hardware/system/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/hardware/system/system_monitor.py b/src/neptune/internal/hardware/system/system_monitor.py deleted file mode 100644 index ec6a2dea3..000000000 --- a/src/neptune/internal/hardware/system/system_monitor.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -try: - import psutil - - PSUTIL_INSTALLED = True -except ImportError: - PSUTIL_INSTALLED = False - - -class SystemMonitor(object): - @staticmethod - def cpu_count(): - return psutil.cpu_count() - - @staticmethod - def cpu_percent(): - return psutil.cpu_percent() - - @staticmethod - def virtual_memory(): - return psutil.virtual_memory() diff --git a/src/neptune/internal/id_formats.py b/src/neptune/internal/id_formats.py deleted file mode 100644 index c46b16292..000000000 --- a/src/neptune/internal/id_formats.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["UniqueId", "SysId", "QualifiedName", "conform_optional"] - -import typing -from typing import NewType - -UniqueId = NewType("UniqueId", str) - -SysId = NewType("SysId", str) - -QualifiedName = NewType("QualifiedName", str) - - -def conform_optional(value: typing.Optional[str], cls): - return cls(value) if value is not None else None diff --git a/src/neptune/internal/init/__init__.py b/src/neptune/internal/init/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/init/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/init/parameters.py b/src/neptune/internal/init/parameters.py deleted file mode 100644 index 285b1b62b..000000000 --- a/src/neptune/internal/init/parameters.py +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = [ - "DEFAULT_FLUSH_PERIOD", - "DEFAULT_NAME", - "OFFLINE_PROJECT_QUALIFIED_NAME", - "ASYNC_LAG_THRESHOLD", - "ASYNC_NO_PROGRESS_THRESHOLD", - "DEFAULT_STOP_TIMEOUT", - "MAX_SERVER_OFFSET", - "IN_BETWEEN_CALLBACKS_MINIMUM_INTERVAL", -] - -DEFAULT_FLUSH_PERIOD = 5 -DEFAULT_NAME = "Untitled" -OFFLINE_PROJECT_QUALIFIED_NAME = "offline/project-placeholder" -ASYNC_LAG_THRESHOLD = 1800.0 -ASYNC_NO_PROGRESS_THRESHOLD = 300.0 -DEFAULT_STOP_TIMEOUT = 60.0 -IN_BETWEEN_CALLBACKS_MINIMUM_INTERVAL = 300.0 -MAX_SERVER_OFFSET = 10_000 diff --git a/src/neptune/internal/notebooks/__init__.py b/src/neptune/internal/notebooks/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/notebooks/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/notebooks/comm.py b/src/neptune/internal/notebooks/comm.py deleted file mode 100644 index 8746157e9..000000000 --- a/src/neptune/internal/notebooks/comm.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["send_checkpoint_created"] - -from neptune.internal.utils.logger import get_logger - -_logger = get_logger() - - -class MessageType(object): - CHECKPOINT_CREATED = "CHECKPOINT_CREATED" - - -def send_checkpoint_created(notebook_id, notebook_path, checkpoint_id) -> None: - """Send checkpoint created message. - - Args: - notebook_id (:obj:`str`): The notebook's id. - notebook_path (:obj:`str`): The notebook's path. - checkpoint_id (:obj:`str`): The checkpoint's path. - - - Raises: - `ImportError`: If ipykernel is not available. - """ - neptune_comm = _get_comm() - neptune_comm.send( - data=dict( - message_type=MessageType.CHECKPOINT_CREATED, - data=dict( - checkpoint_id=str(checkpoint_id), - notebook_id=str(notebook_id), - notebook_path=str(notebook_path), - ), - ) - ) - - -def _get_comm(): - from ipykernel.comm import Comm - - return Comm(target_name="neptune_comm") diff --git a/src/neptune/internal/notebooks/notebooks.py b/src/neptune/internal/notebooks/notebooks.py deleted file mode 100644 index 07f55f462..000000000 --- a/src/neptune/internal/notebooks/notebooks.py +++ /dev/null @@ -1,52 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["create_checkpoint"] - -import threading - -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.notebooks.comm import send_checkpoint_created -from neptune.internal.utils import is_ipython -from neptune.internal.utils.logger import get_logger - -_logger = get_logger() - -_checkpoints_lock = threading.Lock() -_checkpoints = dict() - - -def create_checkpoint(backend: NeptuneBackend, notebook_id: str, notebook_path: str): - if is_ipython(): - import IPython - - ipython = IPython.core.getipython.get_ipython() - execution_count = -1 - if ipython.kernel is not None: - execution_count = ipython.kernel.execution_count - with _checkpoints_lock: - - if execution_count in _checkpoints: - return _checkpoints[execution_count] - - checkpoint = backend.create_checkpoint(notebook_id, notebook_path) - if ipython is not None and ipython.kernel is not None: - send_checkpoint_created( - notebook_id=notebook_id, - notebook_path=notebook_path, - checkpoint_id=checkpoint, - ) - _checkpoints[execution_count] = checkpoint - return checkpoint diff --git a/src/neptune/internal/oauth.py b/src/neptune/internal/oauth.py deleted file mode 100644 index 6234e03d6..000000000 --- a/src/neptune/internal/oauth.py +++ /dev/null @@ -1,136 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import threading -import time - -import jwt -from bravado.exception import HTTPUnauthorized -from bravado.requests_client import Authenticator -from oauthlib.oauth2 import ( - OAuth2Error, - TokenExpiredError, -) -from requests.auth import AuthBase -from requests_oauthlib import OAuth2Session - -from neptune.internal.backends.utils import with_api_exceptions_handler -from neptune.internal.exceptions import NeptuneInvalidApiTokenException -from neptune.internal.utils.utils import update_session_proxies - -_decoding_options = { - "verify_signature": False, - "verify_exp": False, - "verify_nbf": False, - "verify_iat": False, - "verify_aud": False, - "verify_iss": False, -} - - -class NeptuneAuth(AuthBase): - __LOCK = threading.RLock() - - def __init__(self, session_factory): - self.session_factory = session_factory - self.session = session_factory() - self.token_expires_at = 0 - - def __call__(self, r): - try: - return self._add_token(r) - except TokenExpiredError: - self._refresh_token() - return self._add_token(r) - - def _add_token(self, r): - r.url, r.headers, r.body = self.session._client.add_token( - r.url, http_method=r.method, body=r.body, headers=r.headers - ) - return r - - @with_api_exceptions_handler - def refresh_token_if_needed(self): - if self.token_expires_at - time.time() < 30: - self._refresh_token() - - def _refresh_token(self): - with self.__LOCK: - try: - self._refresh_session_token() - except OAuth2Error: - # for some reason oauth session is no longer valid. Retry by creating new fresh session - # we can safely ignore this error, as it will be thrown again if it's persistent - try: - self.session.close() - except Exception: - pass - self.session = self.session_factory() - self._refresh_session_token() - - def _refresh_session_token(self): - self.session.refresh_token(self.session.auto_refresh_url, verify=self.session.verify) - if self.session.token is not None and self.session.token.get("access_token") is not None: - decoded_json_token = jwt.decode(self.session.token.get("access_token"), options=_decoding_options) - self.token_expires_at = decoded_json_token.get("exp") - - -class NeptuneAuthenticator(Authenticator): - def __init__(self, api_token, backend_client, ssl_verify, proxies): - super(NeptuneAuthenticator, self).__init__(host="") - - # We need to pass a lambda to be able to re-create fresh session at any time when needed - def session_factory(): - try: - auth_tokens = backend_client.api.exchangeApiToken(X_Neptune_Api_Token=api_token).response().result - except HTTPUnauthorized: - raise NeptuneInvalidApiTokenException() - - decoded_json_token = jwt.decode(auth_tokens.accessToken, options=_decoding_options) - expires_at = decoded_json_token.get("exp") - client_name = decoded_json_token.get("azp") - refresh_url = "{realm_url}/protocol/openid-connect/token".format(realm_url=decoded_json_token.get("iss")) - token = { - "access_token": auth_tokens.accessToken, - "refresh_token": auth_tokens.refreshToken, - "expires_in": expires_at - time.time(), - } - - session = OAuth2Session( - client_id=client_name, - token=token, - auto_refresh_url=refresh_url, - auto_refresh_kwargs={"client_id": client_name}, - token_updater=_no_token_updater, - ) - session.verify = ssl_verify - - update_session_proxies(session, proxies) - return session - - self.auth = NeptuneAuth(session_factory) - - def matches(self, url): - return True - - def apply(self, request): - self.auth.refresh_token_if_needed() - request.auth = self.auth - return request - - -def _no_token_updater(): - # For unit 
tests. - return None diff --git a/src/neptune/internal/operation.py b/src/neptune/internal/operation.py deleted file mode 100644 index 4516c6b51..000000000 --- a/src/neptune/internal/operation.py +++ /dev/null @@ -1,605 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -import os -from dataclasses import dataclass -from datetime import datetime -from typing import ( - TYPE_CHECKING, - Generic, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, -) - -from neptune.core.components.operation_storage import OperationStorage -from neptune.exceptions import MalformedOperation -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import ( - InternalClientError, - NeptuneException, -) -from neptune.internal.types.file_types import FileType -from neptune.types.atoms.file import File - -if TYPE_CHECKING: - from neptune.attributes.attribute import Attribute - from neptune.internal.backends.neptune_backend import NeptuneBackend - from neptune.internal.operation_visitor import OperationVisitor - -Ret = TypeVar("Ret") -T = TypeVar("T") - - -def all_subclasses(cls): - return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)]) - - -@dataclass -class Operation(abc.ABC): - - path: List[str] - - @abc.abstractmethod - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - pass - - def clean(self, operation_storage: OperationStorage): - pass - - def to_dict(self) -> dict: - return {"type": self.__class__.__name__, "path": self.path} - - @staticmethod - def from_dict(data: dict) -> "Operation": - if "type" not in data: - raise ValueError("Malformed operation {} - type is missing".format(data)) - sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)} - if not data["type"] in sub_classes: - raise ValueError("Malformed operation {} - unknown type {}".format(data, data["type"])) - return sub_classes[data["type"]].from_dict(data) - - -@dataclass -class AssignFloat(Operation): - - value: float - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_float(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["value"] = self.value - return ret - - @staticmethod - def from_dict(data: dict) -> "AssignFloat": - return AssignFloat(data["path"], data["value"]) - - -@dataclass -class AssignInt(Operation): - - value: int - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_int(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["value"] = self.value - return ret - - @staticmethod - def from_dict(data: dict) -> "AssignInt": - return AssignInt(data["path"], data["value"]) - - -@dataclass -class AssignBool(Operation): - - value: bool - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_bool(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["value"] = self.value - return ret - - @staticmethod - 
def from_dict(data: dict) -> "AssignBool": - return AssignBool(data["path"], data["value"]) - - -@dataclass -class AssignString(Operation): - - value: str - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_string(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["value"] = self.value - return ret - - @staticmethod - def from_dict(data: dict) -> "AssignString": - return AssignString(data["path"], data["value"]) - - -@dataclass -class AssignDatetime(Operation): - - value: datetime - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_datetime(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["value"] = int(1000 * self.value.timestamp()) - return ret - - @staticmethod - def from_dict(data: dict) -> "AssignDatetime": - return AssignDatetime(data["path"], datetime.fromtimestamp(data["value"] / 1000)) - - -@dataclass -class AssignArtifact(Operation): - - hash: str - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_assign_artifact(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["hash"] = self.hash - return ret - - @staticmethod - def from_dict(data: dict) -> "AssignArtifact": - return AssignArtifact(data["path"], str(data["hash"])) - - -@dataclass -class UploadFile(Operation): - - ext: str - file_path: str = None - tmp_file_name: str = None - # `clean_after_upload` is for backward compatibility and should be removed in the future - clean_after_upload: bool = False - - @classmethod - def of_file(cls, value: File, attribute_path: List[str], operation_storage: OperationStorage): - if value.file_type is FileType.LOCAL_FILE: - operation = UploadFile( - path=attribute_path, - ext=value.extension, - file_path=os.path.abspath(value.path), - ) - elif value.file_type in (FileType.IN_MEMORY, FileType.STREAM): - tmp_file_name = cls.get_tmp_file_name(attribute_path, value.extension) - value._save(operation_storage.upload_path / tmp_file_name) - operation = UploadFile(path=attribute_path, ext=value.extension, tmp_file_name=tmp_file_name) - else: - raise ValueError(f"Unexpected FileType: {value.file_type}") - return operation - - def clean(self, operation_storage: OperationStorage): - if self.clean_after_upload or self.tmp_file_name: - os.remove(self.get_absolute_path(operation_storage)) - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_upload_file(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["ext"] = self.ext - ret["file_path"] = self.file_path - ret["tmp_file_name"] = self.tmp_file_name - ret["clean_after_upload"] = self.clean_after_upload - return ret - - @staticmethod - def from_dict(data: dict) -> "UploadFile": - return UploadFile( - data["path"], - data["ext"], - data.get("file_path"), - data.get("tmp_file_name"), - data.get("clean_after_upload", False), - ) - - @staticmethod - def get_tmp_file_name(attribute_path: List[str], extension: str): - now = datetime.now() - tmp_file_name = ( - f"{'_'.join(attribute_path)}-{now.timestamp()}-{now.strftime('%Y-%m-%d_%H.%M.%S.%f')}.{extension}" - ) - return tmp_file_name - - def get_absolute_path(self, operation_storage: OperationStorage) -> str: - if self.file_path: - return self.file_path - elif self.tmp_file_name: - return str(operation_storage.upload_path / self.tmp_file_name) - - raise NeptuneException("Expected 'file_path' or 'tmp_file_name' to be filled.") - - -@dataclass -class UploadFileContent(Operation): - - ext: str - 
file_content: str - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_upload_file_content(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["ext"] = self.ext - ret["file_content"] = self.file_content - return ret - - @staticmethod - def from_dict(data: dict) -> "UploadFileContent": - return UploadFileContent(data["path"], data["ext"], data["file_content"]) - - -@dataclass -class UploadFileSet(Operation): - - file_globs: List[str] - reset: bool - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_upload_file_set(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["file_globs"] = self.file_globs - ret["reset"] = str(self.reset) - return ret - - @staticmethod - def from_dict(data: dict) -> "UploadFileSet": - return UploadFileSet(data["path"], data["file_globs"], data["reset"] != str(False)) - - -class LogOperation(Operation, abc.ABC): - pass - - -@dataclass -class LogSeriesValue(Generic[T]): - - value: T - step: Optional[float] - ts: float - - def to_dict(self, value_serializer=lambda x: x) -> dict: - return {"value": value_serializer(self.value), "step": self.step, "ts": self.ts} - - @staticmethod - def from_dict(data: dict, value_deserializer=lambda x: x) -> "LogSeriesValue[T]": - return LogSeriesValue[T](value_deserializer(data["value"]), data.get("step", None), data["ts"]) - - -@dataclass -class LogFloats(LogOperation): - - ValueType = LogSeriesValue[float] - - values: List[ValueType] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_log_floats(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["values"] = [value.to_dict() for value in self.values] - return ret - - @staticmethod - def from_dict(data: dict) -> "LogFloats": - return LogFloats( - data["path"], - [LogFloats.ValueType.from_dict(value) for value in data["values"]], - ) - - -@dataclass -class LogStrings(LogOperation): - - ValueType = LogSeriesValue[str] - - values: List[ValueType] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_log_strings(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["values"] = [value.to_dict() for value in self.values] - return ret - - @staticmethod - def from_dict(data: dict) -> "LogStrings": - return LogStrings( - data["path"], - [LogStrings.ValueType.from_dict(value) for value in data["values"]], - ) - - -@dataclass -class ImageValue: - data: Optional[str] - name: Optional[str] - description: Optional[str] - - @staticmethod - def serializer(obj: "ImageValue"): - return dict(data=obj.data, name=obj.name, description=obj.description) - - @staticmethod - def deserializer(obj) -> "ImageValue": - if obj is None: - return ImageValue(None, None, None) - if isinstance(obj, str): - return ImageValue(data=obj, name=None, description=None) - if isinstance(obj, dict): - return ImageValue(data=obj["data"], name=obj["name"], description=obj["description"]) - else: - raise InternalClientError("Run data on disk is malformed or was saved by newer version of Neptune Library") - - -@dataclass -class LogImages(LogOperation): - - ValueType = LogSeriesValue[ImageValue] - - values: List[ValueType] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_log_images(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["values"] = [value.to_dict(ImageValue.serializer) for value in self.values] - return ret - - @staticmethod - def from_dict(data: dict) -> "LogImages": - 
return LogImages( - data["path"], - [LogImages.ValueType.from_dict(value, ImageValue.deserializer) for value in data["values"]], - ) - - -@dataclass -class ClearFloatLog(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_clear_float_log(self) - - @staticmethod - def from_dict(data: dict) -> "ClearFloatLog": - return ClearFloatLog(data["path"]) - - -@dataclass -class ClearStringLog(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_clear_string_log(self) - - @staticmethod - def from_dict(data: dict) -> "ClearStringLog": - return ClearStringLog(data["path"]) - - -@dataclass -class ClearImageLog(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_clear_image_log(self) - - @staticmethod - def from_dict(data: dict) -> "ClearImageLog": - return ClearImageLog(data["path"]) - - -@dataclass -class ConfigFloatSeries(Operation): - - min: Optional[float] - max: Optional[float] - unit: Optional[str] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_config_float_series(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["min"] = self.min - ret["max"] = self.max - ret["unit"] = self.unit - return ret - - @staticmethod - def from_dict(data: dict) -> "ConfigFloatSeries": - return ConfigFloatSeries(data["path"], data["min"], data["max"], data["unit"]) - - -@dataclass -class AddStrings(Operation): - - values: Set[str] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_add_strings(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["values"] = list(self.values) - return ret - - @staticmethod - def from_dict(data: dict) -> "AddStrings": - return AddStrings(data["path"], set(data["values"])) - - -@dataclass -class RemoveStrings(Operation): - - values: Set[str] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_remove_strings(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["values"] = list(self.values) - return ret - - @staticmethod - def from_dict(data: dict) -> "RemoveStrings": - return RemoveStrings(data["path"], set(data["values"])) - - -@dataclass -class ClearStringSet(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_clear_string_set(self) - - @staticmethod - def from_dict(data: dict) -> "ClearStringSet": - return ClearStringSet(data["path"]) - - -@dataclass -class DeleteFiles(Operation): - - file_paths: Set[str] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_delete_files(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["file_paths"] = list(self.file_paths) - return ret - - @staticmethod - def from_dict(data: dict) -> "DeleteFiles": - return DeleteFiles(data["path"], set(data["file_paths"])) - - -@dataclass -class DeleteAttribute(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_delete_attribute(self) - - @staticmethod - def from_dict(data: dict) -> "DeleteAttribute": - return DeleteAttribute(data["path"]) - - -@dataclass -class TrackFilesToArtifact(Operation): - project_id: str - entries: List[Tuple[str, Optional[str]]] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_track_files_to_artifact(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["entries"] = self.entries - ret["project_id"] = self.project_id - return 
ret - - @staticmethod - def from_dict(data: dict) -> "TrackFilesToArtifact": - return TrackFilesToArtifact( - path=data["path"], - project_id=data["project_id"], - entries=list(map(tuple, data["entries"])), - ) - - -@dataclass -class ClearArtifact(Operation): - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_clear_artifact(self) - - @staticmethod - def from_dict(data: dict) -> "ClearArtifact": - return ClearArtifact(data["path"]) - - -@dataclass -class CopyAttribute(Operation): - container_id: str - container_type: ContainerType - source_path: List[str] - source_attr_cls: Type["Attribute"] - - def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: - return visitor.visit_copy_attribute(self) - - def to_dict(self) -> dict: - ret = super().to_dict() - ret["container_id"] = self.container_id - ret["container_type"] = self.container_type.value - ret["source_path"] = self.source_path - ret["source_attr_name"] = self.source_attr_cls.__name__ - return ret - - @staticmethod - def from_dict(data: dict) -> "CopyAttribute": - from neptune.attributes.attribute import Attribute - - source_attr_cls = {cls.__name__: cls for cls in all_subclasses(Attribute) if cls.supports_copy}.get( - data["source_attr_name"] - ) - - if source_attr_cls is None: - raise MalformedOperation("Copy of non-copiable type found in queue!") - - return CopyAttribute( - data["path"], - data["container_id"], - ContainerType(data["container_type"]), - data["source_path"], - source_attr_cls, - ) - - def resolve(self, backend: "NeptuneBackend") -> Operation: - # repack CopyAttribute op into target attribute assignment - getter = self.source_attr_cls.getter - create_assignment_operation = self.source_attr_cls.create_assignment_operation - value = getter(backend, self.container_id, self.container_type, self.source_path) - return create_assignment_operation(self.path, value) diff --git a/src/neptune/internal/operation_processors/__init__.py b/src/neptune/internal/operation_processors/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/operation_processors/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/operation_processors/async_operation_processor.py b/src/neptune/internal/operation_processors/async_operation_processor.py deleted file mode 100644 index e868a3e72..000000000 --- a/src/neptune/internal/operation_processors/async_operation_processor.py +++ /dev/null @@ -1,359 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("AsyncOperationProcessor",) - -import os -import threading -from pathlib import Path -from queue import Queue -from time import ( - monotonic, - time, -) -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Optional, - Tuple, -) - -from neptune.constants import ASYNC_DIRECTORY -from neptune.core.components.abstract import WithResources -from neptune.core.components.metadata_file import MetadataFile -from neptune.core.components.operation_storage import OperationStorage -from neptune.core.components.queue.disk_queue import DiskQueue -from neptune.envs import NEPTUNE_SYNC_AFTER_STOP_TIMEOUT -from neptune.exceptions import NeptuneSynchronizationAlreadyStoppedException -from neptune.internal.exceptions import NeptuneException -from neptune.internal.init.parameters import DEFAULT_STOP_TIMEOUT -from neptune.internal.operation import Operation -from neptune.internal.operation_processors.operation_logger import ProcessorStopLogger -from neptune.internal.operation_processors.operation_processor import OperationProcessor -from neptune.internal.operation_processors.utils import ( - common_metadata, - get_container_full_path, -) -from neptune.internal.signals_processing.utils import ( - signal_batch_lag, - signal_batch_processed, - signal_batch_started, -) -from neptune.internal.threading.daemon import Daemon -from neptune.internal.utils.disk_utilization import ensure_disk_not_overutilize -from neptune.internal.utils.logger import get_logger -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) - -if TYPE_CHECKING: - from neptune.core.components.abstract import Resource - from neptune.internal.backends.neptune_backend import NeptuneBackend - from neptune.internal.container_type import ContainerType - from neptune.internal.id_formats import UniqueId - from neptune.internal.operation_processors.operation_logger import ProcessorStopSignal - from neptune.internal.signals_processing.signals import Signal - -logger = get_logger() - - -serializer: Callable[[Operation], Dict[str, Any]] = lambda op: op.to_dict() - - -class AsyncOperationProcessor(WithResources, OperationProcessor): - STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS = 30.0 - STOP_QUEUE_MAX_TIME_NO_CONNECTION_SECONDS = float(os.getenv(NEPTUNE_SYNC_AFTER_STOP_TIMEOUT, DEFAULT_STOP_TIMEOUT)) - - def __init__( - self, - container_id: "UniqueId", - container_type: "ContainerType", - backend: "NeptuneBackend", - lock: threading.RLock, - queue: "Queue[Signal]", - sleep_time: float = 5, - batch_size: int = 1000, - data_path: Optional[Path] = None, - should_print_logs: bool = True, - ): - self._should_print_logs: bool = should_print_logs - - self._data_path = ( - data_path if data_path else get_container_full_path(ASYNC_DIRECTORY, container_id, container_type) - ) - - # Initialize directory - self._data_path.mkdir(parents=True, exist_ok=True) - - self._metadata_file = MetadataFile( - data_path=self._data_path, - metadata=common_metadata(mode="async", container_id=container_id, container_type=container_type), - ) - self._operation_storage = OperationStorage(data_path=self._data_path) - self._queue = 
DiskQueue( - data_path=self._data_path, - to_dict=serializer, - from_dict=Operation.from_dict, - lock=lock, - ) - - self._container_id: "UniqueId" = container_id - self._container_type: "ContainerType" = container_type - self._backend: "NeptuneBackend" = backend - self._batch_size: int = batch_size - self._last_version: int = 0 - self._consumed_version: int = 0 - self._consumer: Daemon = self.ConsumerThread(self, sleep_time, batch_size) - self._lock: threading.RLock = lock - self._signals_queue: "Queue[Signal]" = queue - self._accepts_operations: bool = True - - # Caller is responsible for taking this lock - self._waiting_cond = threading.Condition(lock=lock) - - @property - def operation_storage(self) -> "OperationStorage": - return self._operation_storage - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def resources(self) -> Tuple["Resource", ...]: - return self._metadata_file, self._operation_storage, self._queue - - @ensure_disk_not_overutilize - def enqueue_operation(self, op: Operation, *, wait: bool) -> None: - if not self._accepts_operations: - warn_once("Not accepting operations", exception=NeptuneWarning) - return - - self._last_version = self._queue.put(op) - - if self._check_queue_size(): - self._consumer.wake_up() - if wait: - self.wait() - - def start(self) -> None: - self._consumer.start() - - def pause(self) -> None: - self._consumer.pause() - self.flush() - - def resume(self) -> None: - self._consumer.resume() - - def wait(self) -> None: - self.flush() - waiting_for_version = self._last_version - self._consumer.wake_up() - - # Probably reentering lock just for sure - with self._waiting_cond: - self._waiting_cond.wait_for( - lambda: self._consumed_version >= waiting_for_version or not self._consumer.is_running() - ) - if not self._consumer.is_running(): - raise NeptuneSynchronizationAlreadyStoppedException() - - def _check_queue_size(self) -> bool: - return self._queue.size() > self._batch_size / 2 - - def _wait_for_queue_empty( - self, - initial_queue_size: int, - seconds: Optional[float], - signal_queue: Optional["Queue[ProcessorStopSignal]"] = None, - ) -> None: - waiting_start: float = monotonic() - time_elapsed: float = 0.0 - max_reconnect_wait_time: float = self.STOP_QUEUE_MAX_TIME_NO_CONNECTION_SECONDS if seconds is None else seconds - op_logger = ProcessorStopLogger( - processor_id=id(self), - signal_queue=signal_queue, - logger=logger, - should_print_logs=self._should_print_logs, - ) - if initial_queue_size > 0: - if self._consumer.last_backoff_time > 0: - op_logger.log_connection_interruption(max_reconnect_wait_time) - else: - op_logger.log_remaining_operations(size_remaining=initial_queue_size) - - while True: - if seconds is None: - if self._consumer.last_backoff_time == 0: - # reset `waiting_start` on successful action - waiting_start = monotonic() - wait_time = self.STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS - else: - wait_time = max( - min( - seconds - time_elapsed, - self.STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS, - ), - 0.0, - ) - self._queue.wait_for_empty(wait_time) - size_remaining = self._queue.size() - already_synced = initial_queue_size - size_remaining - already_synced_proc = (already_synced / initial_queue_size) * 100 if initial_queue_size else 100 - if size_remaining == 0: - op_logger.log_success(ops_synced=initial_queue_size) - return - - time_elapsed = monotonic() - waiting_start - if self._consumer.last_backoff_time > 0 and time_elapsed >= max_reconnect_wait_time: - - op_logger.log_reconnect_failure( - 
max_reconnect_wait_time=max_reconnect_wait_time, - size_remaining=size_remaining, - ) - return - - if seconds is not None and wait_time == 0: - op_logger.log_sync_failure(seconds=seconds, size_remaining=size_remaining) - return - - if not self._consumer.is_running(): - exception = NeptuneSynchronizationAlreadyStoppedException() - logger.warning(str(exception)) - return - - op_logger.log_still_waiting( - size_remaining=size_remaining, - already_synced=already_synced, - already_synced_proc=already_synced_proc, - ) - - def stop( - self, seconds: Optional[float] = None, signal_queue: Optional["Queue[ProcessorStopSignal]"] = None - ) -> None: - ts = time() - self.flush() - if self._consumer.is_running(): - self._consumer.disable_sleep() - self._consumer.wake_up() - self._wait_for_queue_empty( - initial_queue_size=self._queue.size(), - seconds=seconds, - signal_queue=signal_queue, - ) - self._consumer.interrupt() - sec_left = None if seconds is None else seconds - (time() - ts) - self._consumer.join(sec_left) - - # Close resources - self.close() - - # Remove local files - if self._queue.is_empty(): - self.cleanup() - - def cleanup(self) -> None: - super().cleanup() - try: - self._data_path.rmdir() - except OSError: - pass - - def close(self) -> None: - self._accepts_operations = False - super().close() - - class ConsumerThread(Daemon): - def __init__( - self, - processor: "AsyncOperationProcessor", - sleep_time: float, - batch_size: int, - ): - super().__init__(sleep_time=sleep_time, name="NeptuneAsyncOpProcessor") - self._processor: "AsyncOperationProcessor" = processor - self._batch_size: int = batch_size - self._last_flush: float = 0.0 - - def run(self) -> None: - try: - super().run() - except Exception: - with self._processor._waiting_cond: - self._processor._waiting_cond.notify_all() - raise - - def work(self) -> None: - ts = time() - if ts - self._last_flush >= self._sleep_time: - self._last_flush = ts - self._processor._queue.flush() - - while True: - batch = self._processor._queue.get_batch(self._batch_size) - if not batch: - return - - signal_batch_started(queue=self._processor._signals_queue) - self.process_batch([element.obj for element in batch], batch[-1].ver, batch[-1].at) - - # WARNING: Be careful when changing this function. It is used in the experimental package - def _handle_errors(self, errors: List[NeptuneException]) -> None: - for error in errors: - logger.error( - "Error occurred during asynchronous operation processing: %s", - error, - ) - - @Daemon.ConnectionRetryWrapper( - kill_message=( - "Killing Neptune asynchronous thread. All data is safe on disk and can be later" - " synced manually using `neptune sync` command." 
- ) - ) - def process_batch(self, batch: List[Operation], version: int, occurred_at: Optional[float] = None) -> None: - if occurred_at is not None: - signal_batch_lag(queue=self._processor._signals_queue, lag=time() - occurred_at) - - expected_count = len(batch) - version_to_ack = version - expected_count - while True: - # TODO: Handle Metadata errors - processed_count, errors = self._processor._backend.execute_operations( - container_id=self._processor._container_id, - container_type=self._processor._container_type, - operations=batch, - operation_storage=self._processor._operation_storage, - ) - - signal_batch_processed(queue=self._processor._signals_queue) - version_to_ack += processed_count - batch = batch[processed_count:] - - with self._processor._waiting_cond: - self._processor._queue.ack(version_to_ack) - - self._handle_errors(errors) - - self._processor._consumed_version = version_to_ack - - if version_to_ack == version: - self._processor._waiting_cond.notify_all() - return diff --git a/src/neptune/internal/operation_processors/factory.py b/src/neptune/internal/operation_processors/factory.py deleted file mode 100644 index 8e847c14f..000000000 --- a/src/neptune/internal/operation_processors/factory.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ["get_operation_processor"] - -import os -import threading -from queue import Queue -from typing import TYPE_CHECKING - -from neptune.envs import NEPTUNE_ASYNC_BATCH_SIZE -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.types.mode import Mode - -from .async_operation_processor import AsyncOperationProcessor -from .offline_operation_processor import OfflineOperationProcessor -from .operation_processor import OperationProcessor -from .read_only_operation_processor import ReadOnlyOperationProcessor -from .sync_operation_processor import SyncOperationProcessor - -if TYPE_CHECKING: - from neptune.internal.signals_processing.signals import Signal - - -# WARNING: Be careful when changing this function. 
It is used in the experimental package -def build_async_operation_processor( - container_id: UniqueId, - container_type: ContainerType, - backend: NeptuneBackend, - lock: threading.RLock, - sleep_time: float, - queue: "Queue[Signal]", -) -> OperationProcessor: - return AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=backend, - lock=lock, - sleep_time=sleep_time, - batch_size=int(os.environ.get(NEPTUNE_ASYNC_BATCH_SIZE) or "1000"), - queue=queue, - ) - - -def get_operation_processor( - mode: Mode, - container_id: UniqueId, - container_type: ContainerType, - backend: NeptuneBackend, - lock: threading.RLock, - flush_period: float, - queue: "Queue[Signal]", -) -> OperationProcessor: - if mode == Mode.ASYNC: - return build_async_operation_processor( - container_id=container_id, - container_type=container_type, - backend=backend, - lock=lock, - sleep_time=flush_period, - queue=queue, - ) - elif mode == Mode.SYNC: - return SyncOperationProcessor(container_id, container_type, backend) - elif mode == Mode.DEBUG: - return SyncOperationProcessor(container_id, container_type, backend) - elif mode == Mode.OFFLINE: - # the object was returned by mocked backend and has some random ID. - return OfflineOperationProcessor(container_id, container_type, lock) - elif mode == Mode.READ_ONLY: - return ReadOnlyOperationProcessor() - else: - raise ValueError(f"mode should be one of {[m for m in Mode]}") diff --git a/src/neptune/internal/operation_processors/lazy_operation_processor_wrapper.py b/src/neptune/internal/operation_processors/lazy_operation_processor_wrapper.py deleted file mode 100644 index bda1a65e3..000000000 --- a/src/neptune/internal/operation_processors/lazy_operation_processor_wrapper.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from __future__ import annotations - -__all__ = ("LazyOperationProcessorWrapper",) - -from pathlib import Path -from typing import ( - Any, - Callable, - Optional, - TypeVar, -) - -from neptune.core.components.abstract import Resource -from neptune.core.components.operation_storage import OperationStorage -from neptune.internal.operation import Operation -from neptune.internal.operation_processors.operation_processor import OperationProcessor - -RT = TypeVar("RT") - - -def trigger_evaluation(method: Callable[..., RT]) -> Callable[..., RT]: - def _wrapper(self: LazyOperationProcessorWrapper, *args: Any, **kwargs: Any) -> RT: - self.evaluate() - return method(self, *args, **kwargs) - - return _wrapper - - -def noop_if_not_evaluated(method: Callable[..., RT]) -> Callable[..., Optional[RT]]: - def _wrapper(self: LazyOperationProcessorWrapper, *args: Any, **kwargs: Any) -> Optional[RT]: - if self.is_evaluated: - return method(self, *args, **kwargs) - return None - - return _wrapper - - -def noop_if_evaluated(method: Callable[..., RT]) -> Callable[..., Optional[RT]]: - def _wrapper(self: LazyOperationProcessorWrapper, *args: Any, **kwargs: Any) -> Optional[RT]: - if not self.is_evaluated: - return method(self, *args, **kwargs) - return None - - return _wrapper - - -class LazyOperationProcessorWrapper(OperationProcessor): - def __init__( - self, - operation_processor_getter: Callable[[], OperationProcessor], - post_trigger_side_effect: Optional[Callable[[], Any]] = None, - ): - self._operation_processor_getter = operation_processor_getter - self._post_trigger_side_effect = post_trigger_side_effect - self._operation_processor: OperationProcessor = None # type: ignore - - @noop_if_evaluated - def evaluate(self) -> None: - self._operation_processor = self._operation_processor_getter() - self._operation_processor.start() - - @property - def is_evaluated(self) -> bool: - return self._operation_processor is not None - - @trigger_evaluation - def enqueue_operation(self, op: Operation, *, wait: bool) -> None: - self._operation_processor.enqueue_operation(op, wait=wait) - - @property - @trigger_evaluation - def operation_storage(self) -> OperationStorage: - return self._operation_processor.operation_storage - - @property - @trigger_evaluation - def data_path(self) -> Path: - if isinstance(self._operation_processor, Resource): - return self._operation_processor.data_path - else: - raise NotImplementedError - - @trigger_evaluation - def start(self) -> None: - self._operation_processor.start() - - @noop_if_not_evaluated - def pause(self) -> None: - self._operation_processor.pause() - - @noop_if_not_evaluated - def resume(self) -> None: - self._operation_processor.resume() - - @noop_if_not_evaluated - def flush(self) -> None: - self._operation_processor.flush() - - @noop_if_not_evaluated - def wait(self) -> None: - self._operation_processor.wait() - - @noop_if_not_evaluated - def stop(self, seconds: Optional[float] = None) -> None: - self._operation_processor.stop(seconds=seconds) - - @noop_if_not_evaluated - def close(self) -> None: - self._operation_processor.close() diff --git a/src/neptune/internal/operation_processors/offline_operation_processor.py b/src/neptune/internal/operation_processors/offline_operation_processor.py deleted file mode 100644 index 9c3361188..000000000 --- a/src/neptune/internal/operation_processors/offline_operation_processor.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("OfflineOperationProcessor",) - -import threading -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Optional, - Tuple, -) - -from neptune.constants import OFFLINE_DIRECTORY -from neptune.core.components.abstract import WithResources -from neptune.core.components.metadata_file import MetadataFile -from neptune.core.components.operation_storage import OperationStorage -from neptune.core.components.queue.disk_queue import DiskQueue -from neptune.internal.operation import Operation -from neptune.internal.operation_processors.operation_processor import OperationProcessor -from neptune.internal.operation_processors.utils import ( - common_metadata, - get_container_full_path, -) -from neptune.internal.utils.disk_utilization import ensure_disk_not_overutilize - -if TYPE_CHECKING: - from neptune.core.components.abstract import Resource - from neptune.internal.container_type import ContainerType - from neptune.internal.id_formats import UniqueId - - -serializer: Callable[[Operation], Dict[str, Any]] = lambda op: op.to_dict() - - -class OfflineOperationProcessor(WithResources, OperationProcessor): - def __init__(self, container_id: "UniqueId", container_type: "ContainerType", lock: "threading.RLock"): - self._data_path = get_container_full_path(OFFLINE_DIRECTORY, container_id, container_type) - - # Initialize directory - self._data_path.mkdir(parents=True, exist_ok=True) - - self._metadata_file = MetadataFile( - data_path=self._data_path, - metadata=common_metadata(mode="offline", container_id=container_id, container_type=container_type), - ) - self._operation_storage = OperationStorage(data_path=self._data_path) - self._queue = DiskQueue(data_path=self._data_path, to_dict=serializer, from_dict=Operation.from_dict, lock=lock) - - @property - def operation_storage(self) -> "OperationStorage": - return self._operation_storage - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def resources(self) -> Tuple["Resource", ...]: - return self._metadata_file, self._operation_storage, self._queue - - @ensure_disk_not_overutilize - def enqueue_operation(self, op: Operation, *, wait: bool) -> None: - self._queue.put(op) - - def wait(self) -> None: - self.flush() - - def stop(self, seconds: Optional[float] = None) -> None: - self.flush() - self.close() diff --git a/src/neptune/internal/operation_processors/operation_logger.py b/src/neptune/internal/operation_processors/operation_logger.py deleted file mode 100644 index 2e0307b45..000000000 --- a/src/neptune/internal/operation_processors/operation_logger.py +++ /dev/null @@ -1,197 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "ProcessorStopSignal", - "ProcessorStopLogger", - "ProcessorStopSignalData", - "ProcessorStopSignalType", -] - -import logging -from dataclasses import ( - dataclass, - field, -) -from enum import Enum -from queue import Queue -from typing import Optional - -CONNECTION_INTERRUPTED_MSG = ( - "We have been experiencing connection interruptions during your run." - " Neptune client will now try to resume connection and sync data for the next" - " %s seconds." - " You can also kill this process and synchronize your data manually later" - " using `neptune sync` command." -) - -WAITING_FOR_OPERATIONS_MSG = ( - "Waiting for the remaining %s operations to synchronize with Neptune." " Do not kill this process." -) - -SUCCESS_MSG = "All %s operations synced, thanks for waiting!" - -SYNC_FAILURE_MSG = ( - "Failed to sync all operations in %s seconds." - " You have %s operations saved on disk that can be manually synced" - " using `neptune sync` command." -) - -RECONNECT_FAILURE_MSG = ( - "Failed to reconnect with Neptune in %s seconds." - " You have %s operations saved on disk that can be manually synced" - " using `neptune sync` command." -) - -STILL_WAITING_MSG = "Still waiting for the remaining %s operations" " (%.2f%% done). Please wait." - - -class ProcessorStopSignalType(Enum): - CONNECTION_INTERRUPTED = "CONNECTION_INTERRUPTED" - WAITING_FOR_OPERATIONS = "WAITING_FOR_OPERATIONS" - SUCCESS = "SUCCESS" - SYNC_FAILURE = "SYNC_FAILURE" - RECONNECT_FAILURE = "RECONNECT_FAILURE" - STILL_WAITING = "STILL_WAITING" - - -@dataclass -class ProcessorStopSignalData: - processor_id: int = 0 - size_remaining: int = 0 - already_synced: int = 0 - already_synced_proc: float = 0.0 - seconds: float = 0.0 - max_reconnect_wait_time: float = 0.0 - - -@dataclass -class ProcessorStopSignal: - signal_type: ProcessorStopSignalType - data: ProcessorStopSignalData = field(default_factory=ProcessorStopSignalData) - - -class ProcessorStopLogger: - def __init__( - self, - processor_id: int, - signal_queue: Optional["Queue[ProcessorStopSignal]"], - logger: logging.Logger, - should_print_logs: bool = True, - ) -> None: - self._id = processor_id - self._signal_queue = signal_queue - self._logger = logger - self._should_print_logs = should_print_logs - - def log_connection_interruption(self, max_reconnect_wait_time: float) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.CONNECTION_INTERRUPTED, - data=ProcessorStopSignalData( - processor_id=self._id, max_reconnect_wait_time=max_reconnect_wait_time - ), - ) - ) - else: - self._logger.warning( - CONNECTION_INTERRUPTED_MSG, - max_reconnect_wait_time, - ) - - def log_remaining_operations(self, size_remaining: int) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.WAITING_FOR_OPERATIONS, - data=ProcessorStopSignalData(processor_id=self._id, size_remaining=size_remaining), - ) - ) - else: - if size_remaining: - self._logger.info( - WAITING_FOR_OPERATIONS_MSG, - size_remaining, - ) - - def 
log_success(self, ops_synced: int) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.SUCCESS, - data=ProcessorStopSignalData(processor_id=self._id, already_synced=ops_synced), - ) - ) - else: - if self._should_print_logs: - self._logger.info(SUCCESS_MSG, ops_synced) - - def log_sync_failure(self, seconds: float, size_remaining: int) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.SYNC_FAILURE, - data=ProcessorStopSignalData(processor_id=self._id, seconds=seconds), - ) - ) - else: - if self._should_print_logs: - self._logger.warning( - SYNC_FAILURE_MSG, - seconds, - size_remaining, - ) - - def log_reconnect_failure(self, max_reconnect_wait_time: float, size_remaining: int) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.RECONNECT_FAILURE, - data=ProcessorStopSignalData( - processor_id=self._id, - max_reconnect_wait_time=max_reconnect_wait_time, - size_remaining=size_remaining, - ), - ) - ) - else: - if self._should_print_logs: - self._logger.warning( - RECONNECT_FAILURE_MSG, - max_reconnect_wait_time, - size_remaining, - ) - - def log_still_waiting(self, size_remaining: int, already_synced: int, already_synced_proc: float) -> None: - if self._signal_queue is not None: - self._signal_queue.put( - ProcessorStopSignal( - signal_type=ProcessorStopSignalType.STILL_WAITING, - data=ProcessorStopSignalData( - processor_id=self._id, - size_remaining=size_remaining, - already_synced=already_synced, - already_synced_proc=already_synced_proc, - ), - ) - ) - else: - if self._should_print_logs: - self._logger.info( - STILL_WAITING_MSG, - size_remaining, - already_synced_proc, - ) diff --git a/src/neptune/internal/operation_processors/operation_processor.py b/src/neptune/internal/operation_processors/operation_processor.py deleted file mode 100644 index 5ae0c500c..000000000 --- a/src/neptune/internal/operation_processors/operation_processor.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("OperationProcessor",) - -import abc -from typing import ( - TYPE_CHECKING, - Optional, -) - -if TYPE_CHECKING: - from neptune.core.components.operation_storage import OperationStorage - from neptune.internal.operation import Operation - - -class OperationProcessor(abc.ABC): - @abc.abstractmethod - def enqueue_operation(self, op: "Operation", *, wait: bool) -> None: ... 
- - @property - def operation_storage(self) -> "OperationStorage": - raise NotImplementedError() - - def start(self) -> None: - pass - - def pause(self) -> None: - pass - - def resume(self) -> None: - pass - - def flush(self) -> None: - pass - - def wait(self) -> None: - pass - - def stop(self, seconds: Optional[float] = None) -> None: - pass - - def close(self) -> None: - pass diff --git a/src/neptune/internal/operation_processors/read_only_operation_processor.py b/src/neptune/internal/operation_processors/read_only_operation_processor.py deleted file mode 100644 index 799732625..000000000 --- a/src/neptune/internal/operation_processors/read_only_operation_processor.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ("ReadOnlyOperationProcessor",) - -from typing import TYPE_CHECKING - -from neptune.internal.operation_processors.operation_processor import OperationProcessor -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) - -if TYPE_CHECKING: - from neptune.internal.operation import Operation - - -class ReadOnlyOperationProcessor(OperationProcessor): - def enqueue_operation(self, op: "Operation", *, wait: bool) -> None: - warn_once("Client in read-only mode, nothing will be saved to server.", exception=NeptuneWarning) diff --git a/src/neptune/internal/operation_processors/sync_operation_processor.py b/src/neptune/internal/operation_processors/sync_operation_processor.py deleted file mode 100644 index 084964ebe..000000000 --- a/src/neptune/internal/operation_processors/sync_operation_processor.py +++ /dev/null @@ -1,94 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ("SyncOperationProcessor",) - -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Optional, - Tuple, -) - -from neptune.constants import SYNC_DIRECTORY -from neptune.core.components.abstract import WithResources -from neptune.core.components.metadata_file import MetadataFile -from neptune.core.components.operation_storage import OperationStorage -from neptune.internal.operation_processors.operation_processor import OperationProcessor -from neptune.internal.operation_processors.utils import ( - common_metadata, - get_container_full_path, -) -from neptune.internal.utils.disk_utilization import ensure_disk_not_overutilize - -if TYPE_CHECKING: - from neptune.core.components.abstract import Resource - from neptune.internal.backends.neptune_backend import NeptuneBackend - from neptune.internal.container_type import ContainerType - from neptune.internal.id_formats import UniqueId - from neptune.internal.operation import Operation - - -class SyncOperationProcessor(WithResources, OperationProcessor): - def __init__(self, container_id: "UniqueId", container_type: "ContainerType", backend: "NeptuneBackend"): - self._container_id: "UniqueId" = container_id - self._container_type: "ContainerType" = container_type - self._backend: "NeptuneBackend" = backend - - self._data_path = get_container_full_path(SYNC_DIRECTORY, container_id, container_type) - - # Initialize directory - self._data_path.mkdir(parents=True, exist_ok=True) - - self._metadata_file = MetadataFile( - data_path=self._data_path, - metadata=common_metadata(mode="sync", container_id=container_id, container_type=container_type), - ) - self._operation_storage = OperationStorage(data_path=self._data_path) - - @property - def operation_storage(self) -> "OperationStorage": - return self._operation_storage - - @property - def data_path(self) -> Path: - return self._data_path - - @property - def resources(self) -> Tuple["Resource", ...]: - return self._metadata_file, self._operation_storage - - @ensure_disk_not_overutilize - def enqueue_operation(self, op: "Operation", *, wait: bool) -> None: - _, errors = self._backend.execute_operations( - container_id=self._container_id, - container_type=self._container_type, - operations=[op], - operation_storage=self._operation_storage, - ) - if errors: - raise errors[0] - - def stop(self, seconds: Optional[float] = None) -> None: - self.flush() - self.close() - self.cleanup() - - def cleanup(self) -> None: - super().cleanup() - try: - self._data_path.rmdir() - except OSError: - pass diff --git a/src/neptune/internal/operation_processors/utils.py b/src/neptune/internal/operation_processors/utils.py deleted file mode 100644 index a58e8f21a..000000000 --- a/src/neptune/internal/operation_processors/utils.py +++ /dev/null @@ -1,75 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["common_metadata", "get_container_full_path", "get_container_dir"] - -import os -import platform -import random -import string -import sys -from datetime import ( - datetime, - timezone, -) -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, -) - -from neptune.constants import NEPTUNE_DATA_DIRECTORY -from neptune.objects.structure_version import StructureVersion - -if TYPE_CHECKING: - from neptune.internal.container_type import ContainerType - from neptune.internal.id_formats import UniqueId - - -RANDOM_KEY_LENGTH = 8 - - -def get_neptune_version() -> str: - from neptune.version import __version__ as neptune_version - - return neptune_version - - -def common_metadata(mode: str, container_id: "UniqueId", container_type: "ContainerType") -> Dict[str, Any]: - return { - "mode": mode, - "containerId": container_id, - "containerType": container_type, - "structureVersion": StructureVersion.DIRECT_DIRECTORY.value, - "os": platform.platform(), - "pythonVersion": sys.version, - "neptuneClientVersion": get_neptune_version(), - "createdAt": datetime.now(timezone.utc).isoformat(), - } - - -def get_container_dir(container_id: "UniqueId", container_type: "ContainerType") -> str: - return f"{container_type.value}__{container_id}__{os.getpid()}__{random_key(RANDOM_KEY_LENGTH)}" - - -def get_container_full_path(type_dir: str, container_id: "UniqueId", container_type: "ContainerType") -> Path: - neptune_data_dir = Path(os.getenv("NEPTUNE_DATA_DIRECTORY", NEPTUNE_DATA_DIRECTORY)) - return neptune_data_dir / type_dir / get_container_dir(container_id=container_id, container_type=container_type) - - -def random_key(length: int) -> str: - characters = string.ascii_lowercase + string.digits - return "".join(random.choice(characters) for _ in range(length)) diff --git a/src/neptune/internal/operation_visitor.py b/src/neptune/internal/operation_visitor.py deleted file mode 100644 index 12ef82ac6..000000000 --- a/src/neptune/internal/operation_visitor.py +++ /dev/null @@ -1,153 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["OperationVisitor"] - -import abc -from typing import ( - Generic, - TypeVar, -) - -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearArtifact, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, -) - -Ret = TypeVar("Ret") - - -class OperationVisitor(Generic[Ret]): - def visit(self, op: Operation) -> Ret: - return op.accept(self) - - @abc.abstractmethod - def visit_assign_float(self, op: AssignFloat) -> Ret: - pass - - @abc.abstractmethod - def visit_assign_int(self, op: AssignInt) -> Ret: - pass - - @abc.abstractmethod - def visit_assign_bool(self, op: AssignBool) -> Ret: - pass - - @abc.abstractmethod - def visit_assign_string(self, op: AssignString) -> Ret: - pass - - @abc.abstractmethod - def visit_assign_datetime(self, op: AssignDatetime) -> Ret: - pass - - @abc.abstractmethod - def visit_assign_artifact(self, op: AssignArtifact) -> Ret: - pass - - @abc.abstractmethod - def visit_upload_file(self, op: UploadFile) -> Ret: - pass - - @abc.abstractmethod - def visit_upload_file_content(self, op: UploadFileContent) -> Ret: - pass - - @abc.abstractmethod - def visit_upload_file_set(self, op: UploadFileSet) -> Ret: - pass - - @abc.abstractmethod - def visit_log_floats(self, op: LogFloats) -> Ret: - pass - - @abc.abstractmethod - def visit_log_strings(self, op: LogStrings) -> Ret: - pass - - @abc.abstractmethod - def visit_log_images(self, op: LogImages) -> Ret: - pass - - @abc.abstractmethod - def visit_clear_float_log(self, op: ClearFloatLog) -> Ret: - pass - - @abc.abstractmethod - def visit_clear_string_log(self, op: ClearStringLog) -> Ret: - pass - - @abc.abstractmethod - def visit_clear_image_log(self, op: ClearImageLog) -> Ret: - pass - - @abc.abstractmethod - def visit_config_float_series(self, op: ConfigFloatSeries) -> Ret: - pass - - @abc.abstractmethod - def visit_add_strings(self, op: AddStrings) -> Ret: - pass - - @abc.abstractmethod - def visit_remove_strings(self, op: RemoveStrings) -> Ret: - pass - - @abc.abstractmethod - def visit_delete_attribute(self, op: DeleteAttribute) -> Ret: - pass - - @abc.abstractmethod - def visit_clear_string_set(self, op: ClearStringSet) -> Ret: - pass - - @abc.abstractmethod - def visit_delete_files(self, op: DeleteFiles) -> Ret: - pass - - @abc.abstractmethod - def visit_track_files_to_artifact(self, op: TrackFilesToArtifact) -> Ret: - pass - - @abc.abstractmethod - def visit_clear_artifact(self, op: ClearArtifact) -> Ret: - pass - - @abc.abstractmethod - def visit_copy_attribute(self, op: CopyAttribute) -> Ret: - pass diff --git a/src/neptune/internal/patches/__init__.py b/src/neptune/internal/patches/__init__.py deleted file mode 100644 index 1a172a04f..000000000 --- a/src/neptune/internal/patches/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["apply_patches"] - -from neptune.internal.patches.bravado import patch as bravado_patch - -patches = [bravado_patch] - - -# Apply patches when importing a patching module -# Should be called before any usage of patched objects -def apply_patches(): - for patch in patches: - patch() diff --git a/src/neptune/internal/patches/bravado.py b/src/neptune/internal/patches/bravado.py deleted file mode 100644 index 475471560..000000000 --- a/src/neptune/internal/patches/bravado.py +++ /dev/null @@ -1,80 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import functools -import re - -import bravado_core.model -from bravado_core.model import ( - _bless_models, - _collect_models, - _get_unprocessed_uri, - _post_process_spec, - _tag_models, -) - - -def _run_post_processing(spec): - visited_models = {} - - def _call_post_process_spec(spec_dict): - # Discover all the models in spec_dict - _post_process_spec( - spec_dict=spec_dict, - spec_resolver=spec.resolver, - on_container_callbacks=[ - functools.partial( - _tag_models, - visited_models=visited_models, - swagger_spec=spec, - ), - functools.partial( - _bless_models, - visited_models=visited_models, - swagger_spec=spec, - ), - functools.partial( - _collect_models, - models=spec.definitions, - swagger_spec=spec, - ), - ], - ) - - # Post-process the spec to identify models - _call_post_process_spec(spec.spec_dict) - - processed_uris = { - uri - for uri in spec.resolver.store - if uri == spec.origin_url or re.match(r"http(s)?://json-schema\.org/draft(/\d{4})?-\d+/(schema|meta/.*)", uri) - } - additional_uri = _get_unprocessed_uri(spec, processed_uris) - while additional_uri is not None: - # Post-process each referenced spec to identify models in the definitions of linked files - with spec.resolver.in_scope(additional_uri): - _call_post_process_spec( - spec.resolver.store[additional_uri], - ) - - processed_uris.add(additional_uri) - additional_uri = _get_unprocessed_uri(spec, processed_uris) - - -# Issue: https://github.com/Yelp/bravado-core/issues/388 -# Bravado currently makes additional requests to `json-schema.org` in order to gather missing schemas -# This makes `neptune` unable to run without an internet connection or under strict security policies -def patch(): - bravado_core.model._run_post_processing = _run_post_processing diff --git a/src/neptune/internal/signals_processing/__init__.py b/src/neptune/internal/signals_processing/__init__.py deleted file mode 100644 index 8d06af532..000000000 --- a/src/neptune/internal/signals_processing/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# 
Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/signals_processing/background_job.py b/src/neptune/internal/signals_processing/background_job.py deleted file mode 100644 index 0962a995d..000000000 --- a/src/neptune/internal/signals_processing/background_job.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["CallbacksMonitor"] - -from queue import Queue -from typing import ( - TYPE_CHECKING, - Callable, - Optional, -) - -from neptune.internal.background_job import BackgroundJob -from neptune.internal.signals_processing.signals_processor import SignalsProcessor - -if TYPE_CHECKING: - from neptune.internal.signals_processing.signals import Signal - from neptune.objects import NeptuneObject - - -class CallbacksMonitor(BackgroundJob): - def __init__( - self, - queue: "Queue[Signal]", - async_lag_threshold: float, - async_no_progress_threshold: float, - async_lag_callback: Optional[Callable[["NeptuneObject"], None]] = None, - async_no_progress_callback: Optional[Callable[["NeptuneObject"], None]] = None, - period: float = 10, - ) -> None: - self._period: float = period - self._queue: "Queue[Signal]" = queue - self._thread: Optional["SignalsProcessor"] = None - self._started: bool = False - self._async_lag_threshold: float = async_lag_threshold - self._async_no_progress_threshold: float = async_no_progress_threshold - self._async_lag_callback: Optional[Callable[["NeptuneObject"], None]] = async_lag_callback - self._async_no_progress_callback: Optional[Callable[["NeptuneObject"], None]] = async_no_progress_callback - - def start(self, container: "NeptuneObject") -> None: - self._thread = SignalsProcessor( - period=self._period, - container=container, - queue=self._queue, - async_lag_threshold=self._async_lag_threshold, - async_no_progress_threshold=self._async_no_progress_threshold, - async_lag_callback=self._async_lag_callback, - async_no_progress_callback=self._async_no_progress_callback, - ) - self._thread.start() - self._started = True - - def stop(self) -> None: - if self._thread and self._started: - self._thread.interrupt() - - def join(self, seconds: Optional[float] = None) -> None: - if self._thread and self._started: - self._thread.join(seconds) - - def pause(self) -> None: - if self._thread: - self._thread.pause() - - def resume(self) -> None: - if self._thread: - self._thread.resume() diff --git 
a/src/neptune/internal/signals_processing/signals.py b/src/neptune/internal/signals_processing/signals.py deleted file mode 100644 index a598eec12..000000000 --- a/src/neptune/internal/signals_processing/signals.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "Signal", - "SignalsVisitor", - "BatchStartedSignal", - "BatchProcessedSignal", - "BatchLagSignal", -] - -from abc import abstractmethod -from dataclasses import dataclass - - -@dataclass -class Signal: - occured_at: float - - @abstractmethod - def accept(self, visitor: "SignalsVisitor") -> None: ... - - -@dataclass -class BatchStartedSignal(Signal): - def accept(self, visitor: "SignalsVisitor") -> None: - visitor.visit_batch_started(signal=self) - - -@dataclass -class BatchProcessedSignal(Signal): - def accept(self, visitor: "SignalsVisitor") -> None: - visitor.visit_batch_processed(signal=self) - - -@dataclass -class BatchLagSignal(Signal): - lag: float - - def accept(self, visitor: "SignalsVisitor") -> None: - visitor.visit_batch_lag(signal=self) - - -class SignalsVisitor: - @abstractmethod - def visit_batch_started(self, signal: Signal) -> None: ... - - @abstractmethod - def visit_batch_processed(self, signal: Signal) -> None: ... - - @abstractmethod - def visit_batch_lag(self, signal: Signal) -> None: ... diff --git a/src/neptune/internal/signals_processing/signals_processor.py b/src/neptune/internal/signals_processing/signals_processor.py deleted file mode 100644 index 9ac6fc819..000000000 --- a/src/neptune/internal/signals_processing/signals_processor.py +++ /dev/null @@ -1,127 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["SignalsProcessor"] - -from queue import ( - Empty, - Queue, -) -from threading import Thread -from time import monotonic -from typing import ( - TYPE_CHECKING, - Callable, - Optional, -) - -from neptune.internal.init.parameters import IN_BETWEEN_CALLBACKS_MINIMUM_INTERVAL -from neptune.internal.signals_processing.signals import ( - BatchLagSignal, - SignalsVisitor, -) -from neptune.internal.threading.daemon import Daemon - -if TYPE_CHECKING: - from neptune.internal.signals_processing.signals import Signal - from neptune.objects import NeptuneObject - - -class SignalsProcessor(Daemon, SignalsVisitor): - def __init__( - self, - *, - period: float, - container: "NeptuneObject", - queue: "Queue[Signal]", - async_lag_threshold: float, - async_no_progress_threshold: float, - async_lag_callback: Optional[Callable[["NeptuneObject"], None]] = None, - async_no_progress_callback: Optional[Callable[["NeptuneObject"], None]] = None, - callbacks_interval: float = IN_BETWEEN_CALLBACKS_MINIMUM_INTERVAL, - in_async: bool = True, - ) -> None: - super().__init__(sleep_time=period, name="CallbacksMonitor") - - self._container: "NeptuneObject" = container - self._queue: "Queue[Signal]" = queue - self._async_lag_threshold: float = async_lag_threshold - self._async_no_progress_threshold: float = async_no_progress_threshold - self._async_lag_callback: Optional[Callable[["NeptuneObject"], None]] = async_lag_callback - self._async_no_progress_callback: Optional[Callable[["NeptuneObject"], None]] = async_no_progress_callback - self._callbacks_interval: float = callbacks_interval - self._in_async: bool = in_async - - self._last_batch_started_at: Optional[float] = None - self._last_no_progress_callback_at: Optional[float] = None - self._last_lag_callback_at: Optional[float] = None - - def visit_batch_started(self, signal: "Signal") -> None: - if self._last_batch_started_at is None: - self._last_batch_started_at = signal.occured_at - - def visit_batch_processed(self, signal: "Signal") -> None: - if self._last_batch_started_at is not None: - self._check_no_progress(at_timestamp=signal.occured_at) - self._last_batch_started_at = None - - def visit_batch_lag(self, signal: "Signal") -> None: - if self._async_lag_callback is None or not isinstance(signal, BatchLagSignal): - return - - if signal.lag > self._async_lag_threshold: - current_time = monotonic() - if ( - self._last_lag_callback_at is None - or current_time - self._last_lag_callback_at > self._callbacks_interval - ): - execute_callback(callback=self._async_lag_callback, container=self._container, in_async=self._in_async) - self._last_lag_callback_at = current_time - - def _check_callbacks(self) -> None: - self._check_no_progress(at_timestamp=monotonic()) - - def _check_no_progress(self, at_timestamp: float) -> None: - if self._async_no_progress_callback is None: - return - - if self._last_batch_started_at is not None: - if at_timestamp - self._last_batch_started_at > self._async_no_progress_threshold: - if ( - self._last_no_progress_callback_at is None - or at_timestamp - self._last_no_progress_callback_at > self._callbacks_interval - ): - execute_callback( - callback=self._async_no_progress_callback, container=self._container, in_async=self._in_async - ) - self._last_no_progress_callback_at = monotonic() - - def work(self) -> None: - try: - while not self._queue.empty(): - signal = self._queue.get_nowait() - signal.accept(self) - self._check_callbacks() - except Empty: - pass - - -def execute_callback( - *, callback: Callable[["NeptuneObject"], 
None], container: "NeptuneObject", in_async: bool -) -> None: - if in_async: - Thread(target=callback, name="CallbackExecution", args=(container,), daemon=True).start() - else: - callback(container) diff --git a/src/neptune/internal/signals_processing/utils.py b/src/neptune/internal/signals_processing/utils.py deleted file mode 100644 index dbb6d9991..000000000 --- a/src/neptune/internal/signals_processing/utils.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["signal_batch_processed", "signal_batch_started", "signal_batch_lag"] - -from queue import ( - Full, - Queue, -) -from time import monotonic -from typing import Optional - -from neptune.internal.signals_processing.signals import ( - BatchLagSignal, - BatchProcessedSignal, - BatchStartedSignal, - Signal, -) -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) - - -def signal(*, queue: "Queue[Signal]", obj: "Signal") -> None: - try: - queue.put_nowait(item=obj) - except Full: - warn_once("Signal queue is full. Some signals will be lost.", exception=NeptuneWarning) - - -def signal_batch_started(*, queue: "Queue[Signal]", occured_at: Optional[float] = None) -> None: - signal(queue=queue, obj=BatchStartedSignal(occured_at=occured_at or monotonic())) - - -def signal_batch_processed(*, queue: "Queue[Signal]", occured_at: Optional[float] = None) -> None: - signal(queue=queue, obj=BatchProcessedSignal(occured_at=occured_at or monotonic())) - - -def signal_batch_lag(*, queue: "Queue[Signal]", lag: float, occured_at: Optional[float] = None) -> None: - signal(queue=queue, obj=BatchLagSignal(occured_at=occured_at or monotonic(), lag=lag)) diff --git a/src/neptune/internal/state.py b/src/neptune/internal/state.py deleted file mode 100644 index c45a694f9..000000000 --- a/src/neptune/internal/state.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ContainerState"] - -from enum import Enum - - -class ContainerState(Enum): - CREATED = "created" - STARTED = "started" - STOPPING = "stopping" - STOPPED = "stopped" diff --git a/src/neptune/internal/storage/__init__.py b/src/neptune/internal/storage/__init__.py deleted file mode 100644 index 04fd084f8..000000000 --- a/src/neptune/internal/storage/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "AttributeUploadConfiguration", - "UploadEntry", - "normalize_file_name", - "scan_unique_upload_entries", - "split_upload_files", - "FileChunk", - "FileChunker", - "compress_to_tar_gz_in_memory", -] - -from neptune.internal.storage.datastream import ( - FileChunk, - FileChunker, - compress_to_tar_gz_in_memory, -) -from neptune.internal.storage.storage_utils import ( - AttributeUploadConfiguration, - UploadEntry, - normalize_file_name, - scan_unique_upload_entries, - split_upload_files, -) diff --git a/src/neptune/internal/storage/datastream.py b/src/neptune/internal/storage/datastream.py deleted file mode 100644 index 519178a71..000000000 --- a/src/neptune/internal/storage/datastream.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import dataclasses -import io -import math -import os -import tarfile -from typing import ( - Any, - Generator, - Optional, -) - -from neptune.internal.backends.api_model import MultipartConfig -from neptune.internal.exceptions import ( - InternalClientError, - UploadedFileChanged, -) - - -@dataclasses.dataclass -class FileChunk: - data: bytes - start: int - end: int - - -class FileChunker: - def __init__(self, filename: Optional[str], fobj, total_size, multipart_config: MultipartConfig): - self._filename: Optional[str] = filename - self._fobj = fobj - self._total_size = total_size - self._min_chunk_size = multipart_config.min_chunk_size - self._max_chunk_size = multipart_config.max_chunk_size - self._max_chunk_count = multipart_config.max_chunk_count - - def _get_chunk_size(self) -> int: - if self._total_size > self._max_chunk_count * self._max_chunk_size: - # can't fit it - max_size = self._max_chunk_count * self._max_chunk_size - raise InternalClientError( - f"File {self._filename or 'stream'} is too big to upload:" - f" {self._total_size} bytes exceeds max size {max_size}" - ) - if self._total_size <= self._max_chunk_count * self._min_chunk_size: - # can be done as minimal size chunks -- go for it! 
- return self._min_chunk_size - else: - # need larger chunks -- split more or less equally - return math.ceil(self._total_size / self._max_chunk_count) - - def generate(self) -> Generator[FileChunk, Any, None]: - chunk_size = self._get_chunk_size() - last_offset = 0 - last_change: Optional[float] = os.stat(self._filename).st_mtime if self._filename else None - while last_offset < self._total_size: - chunk = self._fobj.read(chunk_size) - if chunk: - if last_change and last_change < os.stat(self._filename).st_mtime: - raise UploadedFileChanged(self._filename) - if isinstance(chunk, str): - chunk = chunk.encode("utf-8") - new_offset = last_offset + len(chunk) - yield FileChunk(data=chunk, start=last_offset, end=new_offset) - last_offset = new_offset - - -def compress_to_tar_gz_in_memory(upload_entries) -> bytes: - f = io.BytesIO(b"") - - with tarfile.TarFile.open(fileobj=f, mode="w|gz", dereference=True) as archive: - for entry in upload_entries: - archive.add(name=entry.source, arcname=entry.target_path, recursive=True) - - f.seek(0) - data = f.read() - return data diff --git a/src/neptune/internal/storage/storage_utils.py b/src/neptune/internal/storage/storage_utils.py deleted file mode 100644 index bd9d11819..000000000 --- a/src/neptune/internal/storage/storage_utils.py +++ /dev/null @@ -1,269 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import stat -import time -from abc import ( - ABCMeta, - abstractmethod, -) -from dataclasses import dataclass -from io import BytesIO -from pprint import pformat -from typing import ( - BinaryIO, - Generator, - List, - Set, - Union, -) - -from neptune.internal.utils.logger import get_logger - -_logger = get_logger() - - -@dataclass -class AttributeUploadConfiguration: - chunk_size: int - - -class UploadEntry(object): - def __init__(self, source: Union[str, BytesIO], target_path: str): - self.source = source - self.target_path = target_path - - def length(self) -> int: - if self.is_stream(): - return self.source.getbuffer().nbytes - else: - return os.path.getsize(self.source) - - def get_stream(self) -> Union[BinaryIO, io.BytesIO]: - if self.is_stream(): - return self.source - else: - return io.open(self.source, "rb") - - def get_permissions(self) -> str: - if self.is_stream(): - return "----------" - else: - return self.permissions_to_unix_string(self.source) - - @classmethod - def permissions_to_unix_string(cls, path): - st = 0 - if os.path.exists(path): - st = os.lstat(path).st_mode - is_dir = "d" if stat.S_ISDIR(st) else "-" - dic = { - "7": "rwx", - "6": "rw-", - "5": "r-x", - "4": "r--", - "3": "-wx", - "2": "-w-", - "1": "--x", - "0": "---", - } - perm = ("%03o" % st)[-3:] - return is_dir + "".join(dic.get(x, x) for x in perm) - - def __eq__(self, other): - """ - Returns true if both objects are equal - """ - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """ - Returns true if both objects are not equal - """ - return not self == other - - def __hash__(self): - """ - Returns the hash of source and target path - """ - return hash((self.source, self.target_path)) - - def to_str(self): - """ - Returns the string representation of the model - """ - return pformat(self.__dict__) - - def __repr__(self): - """ - For `print` and `pprint` - """ - return self.to_str() - - def is_stream(self): - return hasattr(self.source, "read") - - -class UploadPackage(object): - def __init__(self): - self.items: List[UploadEntry] = [] - self.size: int = 0 - self.len: int = 0 - - def reset(self): - self.items = [] - self.size = 0 - self.len = 0 - - def update(self, entry: UploadEntry, size: int): - self.items.append(entry) - self.size += size - self.len += 1 - - def is_empty(self): - return self.len == 0 - - def __eq__(self, other): - """ - Returns true if both objects are equal - """ - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """ - Returns true if both objects are not equal - """ - return not self == other - - def to_str(self): - """ - Returns the string representation of the model - """ - return pformat(self.__dict__) - - def __repr__(self): - """ - For `print` and `pprint` - """ - return self.to_str() - - -class ProgressIndicator(metaclass=ABCMeta): - @abstractmethod - def progress(self, steps): - pass - - @abstractmethod - def complete(self): - pass - - -class LoggingProgressIndicator(ProgressIndicator): - def __init__(self, total, frequency=10): - self.current = 0 - self.total = total - self.last_warning = time.time() - self.frequency = frequency - _logger.warning( - "You are sending %dMB of source code to Neptune. 
" - "It is pretty uncommon - please make sure it's what you wanted.", - self.total / (1024 * 1024), - ) - - def progress(self, steps): - self.current += steps - if time.time() - self.last_warning > self.frequency: - _logger.warning( - "%d MB / %d MB (%d%%) of source code was sent to Neptune.", - self.current / (1024 * 1024), - self.total / (1024 * 1024), - 100 * self.current / self.total, - ) - self.last_warning = time.time() - - def complete(self): - _logger.warning( - "%d MB (100%%) of source code was sent to Neptune.", - self.total / (1024 * 1024), - ) - - -class SilentProgressIndicator(ProgressIndicator): - def __init__(self): - pass - - def progress(self, steps): - pass - - def complete(self): - pass - - -def scan_unique_upload_entries(upload_entries): - """ - Returns upload entries for all files that could be found for given upload entries. - In case of directory as upload entry, files we be taken from all subdirectories recursively. - Any duplicated entries are removed. - """ - walked_entries = set() - for entry in upload_entries: - if entry.is_stream() or not os.path.isdir(entry.source): - walked_entries.add(entry) - else: - for root, _, files in os.walk(entry.source): - path_relative_to_entry_source = os.path.relpath(root, entry.source) - target_root = os.path.normpath(os.path.join(entry.target_path, path_relative_to_entry_source)) - for filename in files: - walked_entries.add( - UploadEntry( - os.path.join(root, filename), - os.path.join(target_root, filename), - ) - ) - - return walked_entries - - -def split_upload_files( - upload_entries: Set[UploadEntry], - upload_configuration: AttributeUploadConfiguration, - max_files=500, -) -> Generator[UploadPackage, None, None]: - current_package = UploadPackage() - - for entry in upload_entries: - if entry.is_stream(): - if current_package.len > 0: - yield current_package - current_package.reset() - current_package.update(entry, 0) - yield current_package - current_package.reset() - else: - size = os.path.getsize(entry.source) - if ( - size + current_package.size > upload_configuration.chunk_size or current_package.len > max_files - ) and not current_package.is_empty(): - yield current_package - current_package.reset() - current_package.update(entry, size) - - yield current_package - - -def normalize_file_name(name): - return name.replace(os.sep, "/") diff --git a/src/neptune/internal/streams/__init__.py b/src/neptune/internal/streams/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/streams/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/streams/std_capture_background_job.py b/src/neptune/internal/streams/std_capture_background_job.py deleted file mode 100644 index 99816de12..000000000 --- a/src/neptune/internal/streams/std_capture_background_job.py +++ /dev/null @@ -1,72 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["StdoutCaptureBackgroundJob", "StderrCaptureBackgroundJob"] - -from typing import ( - TYPE_CHECKING, - Optional, -) - -from neptune.internal.background_job import BackgroundJob -from neptune.internal.streams.std_stream_capture_logger import ( - StderrCaptureLogger, - StdoutCaptureLogger, -) - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - - -class StdoutCaptureBackgroundJob(BackgroundJob): - def __init__(self, attribute_name: str): - self._attribute_name = attribute_name - self._logger = None - - def start(self, container: "NeptuneObject"): - self._logger = StdoutCaptureLogger(container, self._attribute_name) - - def stop(self): - self._logger.close() - - def pause(self): - self._logger.pause() - - def resume(self): - self._logger.resume() - - def join(self, seconds: Optional[float] = None): - pass - - -class StderrCaptureBackgroundJob(BackgroundJob): - def __init__(self, attribute_name: str): - self._attribute_name = attribute_name - self._logger = None - - def start(self, container: "NeptuneObject"): - self._logger = StderrCaptureLogger(container, self._attribute_name) - - def stop(self): - self._logger.close() - - def pause(self): - self._logger.pause() - - def resume(self): - self._logger.resume() - - def join(self, seconds: Optional[float] = None): - pass diff --git a/src/neptune/internal/streams/std_stream_capture_logger.py b/src/neptune/internal/streams/std_stream_capture_logger.py deleted file mode 100644 index 9a18c83ea..000000000 --- a/src/neptune/internal/streams/std_stream_capture_logger.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["StdoutCaptureLogger", "StderrCaptureLogger"] - -import sys -import threading -from queue import Queue -from typing import TextIO - -from neptune.internal.threading.daemon import Daemon -from neptune.objects import NeptuneObject - - -class StdStreamCaptureLogger: - def __init__(self, container: NeptuneObject, attribute_name: str, stream: TextIO): - self._container = container - self._attribute_name = attribute_name - self.stream = stream - self._thread_local = threading.local() - self.enabled = True - self._log_data_queue = Queue() - self._logging_thread = self.ReportingThread(self, "NeptuneThread_" + attribute_name) - self._logging_thread.start() - - def log_data(self, data): - self._container[self._attribute_name].append(data) - - def pause(self): - self._log_data_queue.put_nowait(None) - self._logging_thread.pause() - - def resume(self): - self._logging_thread.resume() - - def write(self, data: str): - self.stream.write(data) - self._log_data_queue.put_nowait(data) - - def __getattr__(self, attr): - return getattr(self.stream, attr) - - def close(self): - if self.enabled: - self._logging_thread.interrupt() - self.enabled = False - self._log_data_queue.put_nowait(None) - self._logging_thread.join() - - class ReportingThread(Daemon): - def __init__(self, logger: "StdStreamCaptureLogger", name: str): - super().__init__(sleep_time=0, name=name) - self._logger = logger - - @Daemon.ConnectionRetryWrapper(kill_message="Killing Neptune STD capturing thread.") - def work(self) -> None: - while True: - data = self._logger._log_data_queue.get() - if data is None: - break - self._logger.log_data(data) - - -class StdoutCaptureLogger(StdStreamCaptureLogger): - def __init__(self, container: NeptuneObject, attribute_name: str): - super().__init__(container, attribute_name, sys.stdout) - sys.stdout = self - - def close(self): - sys.stdout = self.stream - super().close() - - -class StderrCaptureLogger(StdStreamCaptureLogger): - def __init__(self, container: NeptuneObject, attribute_name: str): - super().__init__(container, attribute_name, sys.stderr) - sys.stderr = self - - def close(self, wait_for_all_logs=True): - sys.stderr = self.stream - super().close() diff --git a/src/neptune/internal/threading/__init__.py b/src/neptune/internal/threading/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/threading/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/threading/daemon.py b/src/neptune/internal/threading/daemon.py deleted file mode 100644 index b134133a8..000000000 --- a/src/neptune/internal/threading/daemon.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Daemon"] - -import abc -import functools -import threading -from enum import Enum - -from neptune.internal.exceptions import NeptuneConnectionLostException -from neptune.internal.utils.logger import get_logger - -logger = get_logger() - - -class Daemon(threading.Thread): - class DaemonState(Enum): - INIT = 1 - WORKING = 2 - PAUSING = 3 - PAUSED = 4 - INTERRUPTED = 5 - STOPPED = 6 - - def __init__(self, sleep_time: float, name): - super().__init__(daemon=True, name=name) - self._sleep_time = sleep_time - self._state: Daemon.DaemonState = Daemon.DaemonState.INIT - self._wait_condition = threading.Condition() - self.last_backoff_time = 0 # used only with ConnectionRetryWrapper decorator - - def interrupt(self): - with self._wait_condition: - self._state = Daemon.DaemonState.INTERRUPTED - self._wait_condition.notify_all() - - def pause(self): - with self._wait_condition: - if self._state != Daemon.DaemonState.PAUSED: - if not self._is_interrupted(): - self._state = Daemon.DaemonState.PAUSING - self._wait_condition.notify_all() - self._wait_condition.wait_for(lambda: self._state != Daemon.DaemonState.PAUSING) - - def resume(self): - with self._wait_condition: - if not self._is_interrupted(): - self._state = Daemon.DaemonState.WORKING - self._wait_condition.notify_all() - - def wake_up(self): - with self._wait_condition: - self._wait_condition.notify_all() - - def disable_sleep(self): - self._sleep_time = 0 - - def is_running(self) -> bool: - with self._wait_condition: - return self._state in ( - Daemon.DaemonState.WORKING, - Daemon.DaemonState.PAUSING, - Daemon.DaemonState.PAUSED, - ) - - def _is_interrupted(self) -> bool: - with self._wait_condition: - return self._state in (Daemon.DaemonState.INTERRUPTED, Daemon.DaemonState.STOPPED) - - def run(self): - with self._wait_condition: - if not self._is_interrupted(): - self._state = Daemon.DaemonState.WORKING - try: - while not self._is_interrupted(): - with self._wait_condition: - if self._state == Daemon.DaemonState.PAUSING: - self._state = Daemon.DaemonState.PAUSED - self._wait_condition.notify_all() - self._wait_condition.wait_for(lambda: self._state != Daemon.DaemonState.PAUSED) - - if self._state == Daemon.DaemonState.WORKING: - self.work() - with self._wait_condition: - if self._sleep_time > 0 and self._state == Daemon.DaemonState.WORKING: - self._wait_condition.wait(timeout=self._sleep_time) - finally: - with self._wait_condition: - self._state = Daemon.DaemonState.STOPPED - self._wait_condition.notify_all() - - @abc.abstractmethod - def work(self): - pass - - class ConnectionRetryWrapper: - INITIAL_RETRY_BACKOFF = 2 - MAX_RETRY_BACKOFF = 120 - - def __init__(self, kill_message): - self.kill_message = kill_message - - def __call__(self, func): - @functools.wraps(func) - def wrapper(self_: Daemon, *args, **kwargs): - while not self_._is_interrupted(): - try: - result = func(self_, *args, **kwargs) - if self_.last_backoff_time > 0: - self_.last_backoff_time = 0 - logger.info("Communication with Neptune restored!") - return result - except NeptuneConnectionLostException as e: - if self_.last_backoff_time == 0: - 
logger.warning( - "Experiencing connection interruptions." - " Will try to reestablish communication with Neptune." - " Internal exception was: %s", - e.cause.__class__.__name__, - ) - self_.last_backoff_time = self.INITIAL_RETRY_BACKOFF - else: - self_.last_backoff_time = min(self_.last_backoff_time * 2, self.MAX_RETRY_BACKOFF) - - with self_._wait_condition: - self_._wait_condition.wait(self_.last_backoff_time) - except Exception: - logger.error( - "Unexpected error occurred in Neptune background thread: %s", - self.kill_message, - ) - raise - - return wrapper diff --git a/src/neptune/internal/types/__init__.py b/src/neptune/internal/types/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/types/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/types/file_types.py b/src/neptune/internal/types/file_types.py deleted file mode 100644 index 5d1a26f1c..000000000 --- a/src/neptune/internal/types/file_types.py +++ /dev/null @@ -1,168 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
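ConnectionRetryWrapper above retries the daemon's work loop on NeptuneConnectionLostException, starting at a 2-second delay, doubling it up to a 120-second cap, and resetting it once communication is restored. The same policy in isolation, as a plain decorator sketch (ConnectionError stands in for the Neptune-specific exception):

import functools
import time

INITIAL_RETRY_BACKOFF = 2
MAX_RETRY_BACKOFF = 120

def retry_on_connection_loss(func):
    """Retry forever on ConnectionError, doubling the delay from 2 s to a 120 s cap."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        backoff = 0  # the original keeps this on the Daemon instance as last_backoff_time
        while True:
            try:
                return func(*args, **kwargs)
            except ConnectionError:
                backoff = INITIAL_RETRY_BACKOFF if backoff == 0 else min(backoff * 2, MAX_RETRY_BACKOFF)
                time.sleep(backoff)

    return wrapper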
-# -__all__ = [ - "FileComposite", - "LocalFileComposite", - "InMemoryComposite", - "FileComposite", - "StreamComposite", -] - -import abc -import enum -import io -import os -from functools import wraps -from io import IOBase -from typing import ( - Optional, - Union, -) - -from neptune.exceptions import StreamAlreadyUsedException -from neptune.internal.exceptions import NeptuneException -from neptune.internal.utils import verify_type - - -class FileType(enum.Enum): - LOCAL_FILE = "LOCAL_FILE" - IN_MEMORY = "IN_MEMORY" - STREAM = "STREAM" - - -class FileComposite(abc.ABC): - """ - Composite class defining behaviour of neptune.types.atoms.file.File - """ - - file_type: FileType = None - - def __init__(self, extension: str): - verify_type("extension", extension, str) - self._extension = extension - - @property - def extension(self): - return self._extension - - @property - def path(self): - raise NeptuneException(f"`path` attribute is not supported for {self.file_type}") - - @property - def content(self): - raise NeptuneException(f"`content` attribute is not supported for {self.file_type}") - - def save(self, path): - raise NeptuneException(f"`save` method is not supported for {self.file_type}") - - -class LocalFileComposite(FileComposite): - file_type = FileType.LOCAL_FILE - - def __init__(self, path: str, extension: Optional[str] = None): - try: - ext = os.path.splitext(path)[1] - ext = ext[1:] if ext else "" - except ValueError: - ext = "" - super().__init__(extension or ext) - - self._path = path - - @property - def path(self): - return self._path - - def __str__(self): - return f"File(path={self.path})" - - -class InMemoryComposite(FileComposite): - file_type = FileType.IN_MEMORY - - def __init__(self, content: Union[str, bytes], extension: Optional[str] = None): - if isinstance(content, str): - ext = "txt" - content = content.encode("utf-8") - else: - ext = "bin" - super().__init__(extension or ext) - - self._content = content - - @property - def content(self): - return self._content - - def save(self, path): - with open(path, "wb") as f: - f.write(self._content) - - def __str__(self): - return "File(content=...)" - - -def read_once(f): - """Decorator for validating read once on STREAM objects""" - - @wraps(f) - def func(self: "StreamComposite", *args, **kwargs): - if self._stream_read: - raise StreamAlreadyUsedException() - self._stream_read = True - return f(self, *args, **kwargs) - - return func - - -class StreamComposite(FileComposite): - file_type = FileType.STREAM - - def __init__(self, stream: IOBase, seek: Optional[int] = 0, extension: Optional[str] = None): - verify_type("stream", stream, (IOBase, type(None))) - verify_type("extension", extension, (str, type(None))) - - if seek is not None and stream.seekable(): - stream.seek(seek) - if extension is None: - extension = "txt" if isinstance(stream, io.TextIOBase) else "bin" - super().__init__(extension) - - self._stream = stream - self._stream_read = False - - @property - @read_once - def content(self): - val = self._stream.read() - if isinstance(self._stream, io.TextIOBase): - val = val.encode() - return val - - @read_once - def save(self, path): - with open(path, "wb") as f: - buffer_ = self._stream.read(io.DEFAULT_BUFFER_SIZE) - while buffer_: - # TODO: replace with Walrus Operator once python3.7 support is dropped - if isinstance(self._stream, io.TextIOBase): - buffer_ = buffer_.encode() - f.write(buffer_) - buffer_ = self._stream.read(io.DEFAULT_BUFFER_SIZE) - - def __str__(self): - return f"File(stream={self._stream})" 
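The read_once decorator above enforces that a stream-backed File is consumed at most once, since an exhausted stream cannot be replayed. A self-contained sketch of that guard (StreamAlreadyUsedError is an illustrative stand-in for the deleted StreamAlreadyUsedException, and content is a method rather than a property, for brevity):

import functools
import io

class StreamAlreadyUsedError(RuntimeError):
    """Stand-in for neptune's StreamAlreadyUsedException."""

def read_once(f):
    """Allow a stream-consuming method to run only once per instance."""
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        if getattr(self, "_stream_read", False):
            raise StreamAlreadyUsedError()
        self._stream_read = True
        return f(self, *args, **kwargs)
    return wrapper

class StreamHolder:
    def __init__(self, stream: io.IOBase):
        self._stream = stream
        self._stream_read = False

    @read_once
    def content(self) -> bytes:
        return self._stream.read()

holder = StreamHolder(io.BytesIO(b"payload"))
assert holder.content() == b"payload"
# A second holder.content() call would raise StreamAlreadyUsedError.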
diff --git a/src/neptune/internal/types/stringify_value.py b/src/neptune/internal/types/stringify_value.py deleted file mode 100644 index 518fcc8e4..000000000 --- a/src/neptune/internal/types/stringify_value.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["StringifyValue", "extract_if_stringify_value"] - -import math -from typing import Any - -from neptune.constants import ( - MAX_32_BIT_INT, - MIN_32_BIT_INT, -) -from neptune.internal.utils.logger import get_logger - -logger = get_logger() - - -def is_unsupported_float(value) -> bool: - if isinstance(value, float): - return math.isinf(value) or math.isnan(value) - return False - - -class StringifyValue: - def __init__(self, value: Any): - # check if it's an integer outside 32bit range and cast it to float - if isinstance(value, int) and (value > MAX_32_BIT_INT or value < MIN_32_BIT_INT): - logger.info( - "Value '%d' is outside the range of 32-bit integers ('%d' to '%d') and will be logged as float", - value, - MIN_32_BIT_INT, - MAX_32_BIT_INT, - ) - value = float(value) - if is_unsupported_float(value): - value = str(value) - - self.__value = value - - @property - def value(self): - return self.__value - - def __str__(self): - return str(self.__value) - - def __repr__(self): - return repr(self.__value) - - -def extract_if_stringify_value(val): - if isinstance(val, StringifyValue): - return val.value - return val diff --git a/src/neptune/internal/types/utils.py b/src/neptune/internal/types/utils.py deleted file mode 100644 index 0ff1dcaa8..000000000 --- a/src/neptune/internal/types/utils.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
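StringifyValue above quietly degrades values the backend cannot store exactly: integers outside the signed 32-bit range become floats, and inf/nan floats become strings. A standalone restatement of those two rules, with the bounds written out on the assumption that neptune.constants used the usual 2**31 limits:

import math

MAX_32_BIT_INT = 2**31 - 1
MIN_32_BIT_INT = -(2**31)

def coerce(value):
    """Reimplementation of StringifyValue's coercion rules, for illustration."""
    if isinstance(value, int) and not MIN_32_BIT_INT <= value <= MAX_32_BIT_INT:
        value = float(value)  # out-of-range ints are logged as floats
    if isinstance(value, float) and (math.isinf(value) or math.isnan(value)):
        value = str(value)    # inf/nan are logged as strings
    return value

assert coerce(2**40) == float(2**40)
assert coerce(float("nan")) == "nan"
assert coerce(7) == 7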
-# - -__all__ = ["is_unsupported_float"] - -import math - - -def is_unsupported_float(value: float) -> bool: - if isinstance(value, float): - return math.isinf(value) or math.isnan(value) - return False diff --git a/src/neptune/internal/utils/__init__.py b/src/neptune/internal/utils/__init__.py deleted file mode 100644 index f445d942e..000000000 --- a/src/neptune/internal/utils/__init__.py +++ /dev/null @@ -1,205 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "replace_patch_version", - "verify_type", - "verify_value", - "is_stream", - "is_bool", - "is_int", - "is_float", - "is_string", - "is_float_like", - "is_dict_like", - "is_string_like", - "is_stringify_value", - "verify_collection_type", - "verify_optional_callable", - "is_collection", - "base64_encode", - "base64_decode", - "get_absolute_paths", - "get_common_root", - "does_paths_share_common_drive", - "is_ipython", - "as_list", -] - -import base64 -import os -from glob import glob -from io import IOBase -from typing import ( - Any, - Iterable, - List, - Mapping, - Optional, - Set, - TypeVar, - Union, -) - -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils.logger import get_logger - -T = TypeVar("T") - -_logger = get_logger() - - -def replace_patch_version(version: str): - return version[: version.index(".", version.index(".") + 1)] + ".0" - - -def verify_type(var_name: str, var, expected_type: Union[type, tuple]): - try: - if isinstance(expected_type, tuple): - type_name = " or ".join(get_type_name(t) for t in expected_type) - else: - type_name = get_type_name(expected_type) - except Exception as e: - # Just to be sure that nothing weird will be raised here - raise TypeError("Incorrect type of {}".format(var_name)) from e - - if not isinstance(var, expected_type): - raise TypeError("{} must be a {} (was {})".format(var_name, type_name, type(var))) - - if isinstance(var, IOBase) and not hasattr(var, "read"): - raise TypeError("{} is a stream, which does not implement read method".format(var_name)) - - -def verify_value(var_name: str, var: Any, expected_values: Iterable[T]) -> None: - if var not in expected_values: - raise ValueError(f"{var_name} must be one of {expected_values} (was `{var}`)") - - -def is_stream(var): - return isinstance(var, IOBase) and hasattr(var, "read") - - -def is_bool(var): - return isinstance(var, bool) - - -def is_int(var): - return isinstance(var, int) - - -def is_float(var): - return isinstance(var, (float, int)) - - -def is_string(var): - return isinstance(var, str) - - -def is_float_like(var): - try: - _ = float(var) - return True - except (ValueError, TypeError): - return False - - -def is_dict_like(var): - return isinstance(var, (dict, Mapping)) - - -def is_string_like(var): - try: - _ = str(var) - return True - except ValueError: - return False - - -def is_stringify_value(var): - return isinstance(var, StringifyValue) - - -def get_type_name(_type: Union[type, tuple]): - return 
_type.__name__ if hasattr(_type, "__name__") else str(_type) - - -def verify_collection_type(var_name: str, var, expected_type: Union[type, tuple]): - verify_type(var_name, var, (list, set, tuple)) - for value in var: - verify_type("elements of collection '{}'".format(var_name), value, expected_type) - - -def verify_optional_callable(var_name: str, var): - if var and not callable(var): - raise TypeError("{} must be a callable (was {})".format(var_name, type(var))) - - -def is_collection(var) -> bool: - return isinstance(var, (list, set, tuple)) - - -def base64_encode(data: bytes) -> str: - return base64.b64encode(data).decode("utf-8") - - -def base64_decode(data: str) -> bytes: - return base64.b64decode(data.encode("utf-8")) - - -def get_absolute_paths(file_globs: Iterable[str]) -> List[str]: - expanded_paths: Set[str] = set() - for file_glob in file_globs: - expanded_paths |= set(glob(file_glob, recursive=True)) - return list(os.path.abspath(expanded_file) for expanded_file in expanded_paths) - - -def get_common_root(absolute_paths: List[str]) -> Optional[str]: - try: - common_root = os.path.commonpath(absolute_paths) - if os.path.isfile(common_root): - common_root = os.path.dirname(common_root) - if common_root.startswith(os.getcwd() + os.sep): - common_root = os.getcwd() - return common_root - except ValueError: - return None - - -def does_paths_share_common_drive(paths: List[str]) -> bool: - return len(set(map(lambda path: os.path.splitdrive(path)[0], paths))) == 1 - - -def is_ipython() -> bool: - try: - import IPython - - ipython = IPython.core.getipython.get_ipython() - return ipython is not None - except ImportError: - return False - - -def as_list(name: str, value: Optional[Union[str, Iterable[str]]]) -> Iterable[str]: - verify_type(name, value, (type(None), str, Iterable)) - - if value is None: - return [] - - if isinstance(value, str): - return [value] - - verify_collection_type(name, value, str) - - return value diff --git a/src/neptune/internal/utils/dependency_tracking.py b/src/neptune/internal/utils/dependency_tracking.py deleted file mode 100644 index e813dedcd..000000000 --- a/src/neptune/internal/utils/dependency_tracking.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = [ - "DependencyTrackingStrategy", - "InferDependenciesStrategy", - "FileDependenciesStrategy", -] - -import os -import sys -from abc import ( - ABC, - abstractmethod, -) -from typing import ( - TYPE_CHECKING, - Union, -) - -from neptune.internal.utils.logger import get_logger - -if sys.version_info >= (3, 8): - from importlib.metadata import ( - Distribution, - distributions, - ) -else: - from importlib_metadata import Distribution, distributions - -from neptune.types import File - -if TYPE_CHECKING: - from neptune import Run - -logger = get_logger() - - -class DependencyTrackingStrategy(ABC): - @abstractmethod - def log_dependencies(self, run: "Run") -> None: ... 
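The verify_type helper above (and its collection variant) exists to give every public entry point the same TypeError wording. A trimmed sketch of the core check, plus a call that passes:

from typing import Union

def verify_type(var_name: str, var, expected_type: Union[type, tuple]) -> None:
    """Minimal core of the helper above: consistent TypeError wording."""
    if isinstance(expected_type, tuple):
        type_name = " or ".join(t.__name__ for t in expected_type)
    else:
        type_name = expected_type.__name__
    if not isinstance(var, expected_type):
        raise TypeError("{} must be a {} (was {})".format(var_name, type_name, type(var)))

verify_type("timeout", 1.5, (int, float))      # passes silently
# verify_type("timeout", "soon", (int, float))  # would raise TypeError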
- - -class InferDependenciesStrategy(DependencyTrackingStrategy): - def log_dependencies(self, run: "Run") -> None: - dependencies = [] - - def sorting_key_func(d: Distribution) -> str: - _name = d.metadata["Name"] - return _name.lower() if isinstance(_name, str) else "" - - dists = sorted(distributions(), key=sorting_key_func) - - for dist in dists: - if dist.metadata["Name"]: - dependencies.append(f'{dist.metadata["Name"]}=={dist.metadata["Version"]}') - - dependencies_str = "\n".join(dependencies) - - if dependencies_str: - run["source_code/requirements"].upload(File.from_content(dependencies_str)) - - -class FileDependenciesStrategy(DependencyTrackingStrategy): - def __init__(self, path: Union[str, os.PathLike]): - self._path = path - - def log_dependencies(self, run: "Run") -> None: - if os.path.isfile(self._path): - run["source_code/requirements"].upload(self._path) - else: - logger.error("[ERROR] File '%s' does not exist - skipping dependency file upload.", self._path) diff --git a/src/neptune/internal/utils/deprecation.py b/src/neptune/internal/utils/deprecation.py deleted file mode 100644 index be0615ba7..000000000 --- a/src/neptune/internal/utils/deprecation.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from functools import wraps -from typing import Optional - -from neptune.exceptions import NeptuneParametersCollision -from neptune.internal.warnings import warn_once - -__all__ = ["deprecated", "deprecated_parameter"] - - -def deprecated(*, alternative: Optional[str] = None): - def deco(func): - @wraps(func) - def inner(*args, **kwargs): - additional_info = f", use `{alternative}` instead" if alternative else " and will be removed" - - warn_once( - message=f"`{func.__name__}` is deprecated{additional_info}." - f" We'll end support of it in next major release." - ) - - return func(*args, **kwargs) - - return inner - - return deco - - -def deprecated_parameter(*, deprecated_kwarg_name, required_kwarg_name): - def deco(f): - @wraps(f) - def inner(*args, **kwargs): - if deprecated_kwarg_name in kwargs: - if required_kwarg_name in kwargs: - raise NeptuneParametersCollision(required_kwarg_name, deprecated_kwarg_name, method_name=f.__name__) - - warn_once( - message=f"Parameter `{deprecated_kwarg_name}` is deprecated, use `{required_kwarg_name}` instead." - " We'll end support of it in next major release." - ) - - kwargs[required_kwarg_name] = kwargs[deprecated_kwarg_name] - del kwargs[deprecated_kwarg_name] - - return f(*args, **kwargs) - - return inner - - return deco diff --git a/src/neptune/internal/utils/disk_utilization.py b/src/neptune/internal/utils/disk_utilization.py deleted file mode 100644 index e184124f6..000000000 --- a/src/neptune/internal/utils/disk_utilization.py +++ /dev/null @@ -1,173 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
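InferDependenciesStrategy above reconstructs a requirements-style listing from whatever distributions the running interpreter can see, sorting case-insensitively and skipping distributions that carry no Name metadata. The enumeration needs nothing beyond the standard library on Python 3.8+:

from importlib.metadata import distributions

def infer_requirements() -> str:
    """Return 'name==version' lines for all installed distributions."""
    entries = []
    for dist in distributions():
        name = dist.metadata["Name"]
        if name:  # some distributions carry no Name metadata; skip them
            entries.append(f"{name}=={dist.metadata['Version']}")
    return "\n".join(sorted(entries, key=str.lower))

print(infer_requirements())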
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ensure_disk_not_overutilize"] - - -import os -from abc import ( - ABC, - abstractmethod, -) -from functools import wraps -from typing import ( - Any, - Callable, - Optional, -) - -import psutil -from psutil import Error - -from neptune.constants import NEPTUNE_DATA_DIRECTORY -from neptune.envs import ( - NEPTUNE_MAX_DISK_USAGE, - NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED, -) -from neptune.exceptions import NeptuneMaxDiskUtilizationExceeded -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) - - -def get_neptune_data_directory() -> str: - return os.getenv("NEPTUNE_DATA_DIRECTORY", NEPTUNE_DATA_DIRECTORY) - - -def get_disk_utilization_percent(path: Optional[str] = None) -> Optional[float]: - try: - if path is None: - path = get_neptune_data_directory() - - return float(psutil.disk_usage(path).percent) - except (ValueError, TypeError, Error): - return None - - -def get_max_disk_utilization_from_env() -> Optional[float]: - env_limit_disk_utilization = os.getenv(NEPTUNE_MAX_DISK_USAGE) - - if env_limit_disk_utilization is None: - return None - - try: - limit_disk_utilization = float(env_limit_disk_utilization) - if limit_disk_utilization <= 0 or limit_disk_utilization > 100: - raise ValueError - - return limit_disk_utilization - except (ValueError, TypeError): - warn_once( - f"Provided invalid value of '{NEPTUNE_MAX_DISK_USAGE}': '{env_limit_disk_utilization}'. " - "Check of disk utilization will not be applied.", - exception=NeptuneWarning, - ) - return None - - -class DiskUtilizationErrorHandlerTemplate(ABC): - def __init__(self, max_disk_utilization: Optional[float], func: Callable[..., None], *args: Any, **kwargs: Any): - self.max_disk_utilization = max_disk_utilization - self.func = func - self.args = args - self.kwargs = kwargs - - @abstractmethod - def handle_limit_not_set(self) -> None: ... # pragma: no cover - - @abstractmethod - def handle_utilization_calculation_error(self) -> None: ... # pragma: no cover - - @abstractmethod - def handle_limit_not_exceeded(self) -> None: ... # pragma: no cover - - @abstractmethod - def handle_limit_exceeded(self, current_utilization: float) -> None: ... # pragma: no cover - - def run(self) -> None: - if not self.max_disk_utilization: - return self.handle_limit_not_set() - - current_utilization = get_disk_utilization_percent() - - if current_utilization is None: - return self.handle_utilization_calculation_error() - - if current_utilization < self.max_disk_utilization: - return self.handle_limit_not_exceeded() - - self.handle_limit_exceeded(current_utilization) - - -class NonRaisingErrorHandler(DiskUtilizationErrorHandlerTemplate): - DISK_ISSUE_MSG = "Encountered disk issue. Neptune will not save your data." 
- - def handle_limit_not_set(self) -> None: - try: - return self.func(*self.args, **self.kwargs) - except (OSError, Error): - warn_once(self.DISK_ISSUE_MSG, exception=NeptuneWarning) - - def handle_utilization_calculation_error(self) -> None: - try: - return self.func(*self.args, **self.kwargs) - except (OSError, Error): - warn_once(self.DISK_ISSUE_MSG, exception=NeptuneWarning) - - def handle_limit_not_exceeded(self) -> None: - try: - return self.func(*self.args, **self.kwargs) - except (OSError, Error): - warn_once(self.DISK_ISSUE_MSG, exception=NeptuneWarning) - - def handle_limit_exceeded(self, current_utilization: float) -> None: - warn_once( - f"Disk usage is at {current_utilization}%, which exceeds the maximum allowed utilization " - + f"of {self.max_disk_utilization}%. Neptune will not save your data.", - exception=NeptuneWarning, - ) - - -class RaisingErrorHandler(DiskUtilizationErrorHandlerTemplate): - def handle_limit_not_set(self) -> None: - return self.func(*self.args, **self.kwargs) - - def handle_utilization_calculation_error(self) -> None: - return self.func(*self.args, **self.kwargs) - - def handle_limit_not_exceeded(self) -> None: - return self.func(*self.args, **self.kwargs) - - def handle_limit_exceeded(self, current_utilization: float) -> None: - if isinstance(self.max_disk_utilization, float): - - raise NeptuneMaxDiskUtilizationExceeded( - disk_utilization=current_utilization, - utilization_limit=self.max_disk_utilization, - ) - - -def ensure_disk_not_overutilize(func: Callable[..., None]) -> Callable[..., None]: - raising_on_disk_issue = os.getenv(NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED, "True").lower() in ("true", "t", "1") - max_disk_utilization = get_max_disk_utilization_from_env() - - error_handler = RaisingErrorHandler if raising_on_disk_issue else NonRaisingErrorHandler - - @wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> None: - error_handler(max_disk_utilization, func, *args, **kwargs).run() - - return wrapper diff --git a/src/neptune/internal/utils/generic_attribute_mapper.py b/src/neptune/internal/utils/generic_attribute_mapper.py deleted file mode 100644 index da40ba9b2..000000000 --- a/src/neptune/internal/utils/generic_attribute_mapper.py +++ /dev/null @@ -1,67 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
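ensure_disk_not_overutilize above picks one of the two handler classes from environment variables and routes every storage write through it. A compressed sketch of the same guard idea, reading the ceiling from an illustrative env var (the real names live in neptune.envs) and checking psutil.disk_usage before each call:

import functools
import os
import psutil

def guard_disk(max_percent_env: str = "MAX_DISK_USAGE"):  # illustrative env var name
    """Skip the wrapped write when disk utilization exceeds the configured ceiling."""
    def deco(func):
        limit = float(os.getenv(max_percent_env, "100"))  # read once, like the original

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            used = psutil.disk_usage(".").percent
            if used >= limit:
                print(f"disk at {used}% >= {limit}%; dropping write")  # or raise instead
                return None
            return func(*args, **kwargs)

        return wrapper
    return deco

@guard_disk()
def flush_to_disk():
    ...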
-# -__all__ = ["NoValue", "atomic_attribute_types_map", "map_attribute_result_to_value"] - -from neptune.api.models import FieldType - - -class NoValue: - pass - - -VALUE = "value" -LAST_VALUE = "last" -VALUES = "values" - -atomic_attribute_types_map = { - FieldType.FLOAT.value: "floatProperties", - FieldType.INT.value: "intProperties", - FieldType.BOOL.value: "boolProperties", - FieldType.STRING.value: "stringProperties", - FieldType.DATETIME.value: "datetimeProperties", - FieldType.OBJECT_STATE.value: "experimentStateProperties", - FieldType.NOTEBOOK_REF.value: "notebookRefProperties", -} - -value_series_attribute_types_map = { - FieldType.FLOAT_SERIES.value: "floatSeriesProperties", - FieldType.STRING_SERIES.value: "stringSeriesProperties", -} - -value_set_attribute_types_map = { - FieldType.STRING_SET.value: "stringSetProperties", -} - -# TODO: nicer mapping? -_unmapped_attribute_types_map = { - FieldType.FILE_SET.value: "fileSetProperties", # TODO: return size? - FieldType.FILE.value: "fileProperties", # TODO: name? size? - FieldType.IMAGE_SERIES.value: "imageSeriesProperties", # TODO: return last step? - FieldType.GIT_REF.value: "gitRefProperties", # TODO: commit? branch? -} - - -def map_attribute_result_to_value(attribute): - for attribute_map, value_key in [ - (atomic_attribute_types_map, VALUE), - (value_series_attribute_types_map, LAST_VALUE), - (value_set_attribute_types_map, VALUES), - ]: - source_property = attribute_map.get(attribute.type) - if source_property is not None: - mapped_attribute_entry = getattr(attribute, source_property) - return getattr(mapped_attribute_entry, value_key) - return NoValue diff --git a/src/neptune/internal/utils/git.py b/src/neptune/internal/utils/git.py deleted file mode 100644 index c8cb5307b..000000000 --- a/src/neptune/internal/utils/git.py +++ /dev/null @@ -1,217 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = [ - "to_git_info", - "GitInfo", - "track_uncommitted_changes", -] - -import warnings -from dataclasses import dataclass -from datetime import datetime -from typing import ( - TYPE_CHECKING, - List, - Optional, - Union, -) - -from neptune.attributes.constants import ( - DIFF_HEAD_INDEX_PATH, - UPSTREAM_INDEX_DIFF, -) -from neptune.internal.utils.logger import get_logger -from neptune.types import File -from neptune.types.atoms.git_ref import ( - GitRef, - GitRefDisabled, -) - -if TYPE_CHECKING: - import git - - from neptune import Run - -_logger = get_logger() - - -@dataclass -class GitInfo: - commit_id: str - message: str - author_name: str - author_email: str - commit_date: datetime - dirty: bool - branch: Optional[str] - remotes: Optional[List[str]] - - -def get_git_repo(repo_path): - # WARN: GitPython asserts the existence of `git` executable - # which consists in failure during the preparation of conda package - try: - import git - - return git.Repo(repo_path, search_parent_directories=True) - except ImportError: - warnings.warn("GitPython could not be initialized") - - -def get_repo_from_git_ref(git_ref: Union[GitRef, GitRefDisabled]) -> Optional["git.Repo"]: - if git_ref == GitRef.DISABLED: - return None - - initial_repo_path = git_ref.resolve_path() - if initial_repo_path is None: - return None - - try: - from git.exc import ( - InvalidGitRepositoryError, - NoSuchPathError, - ) - - try: - return get_git_repo(repo_path=initial_repo_path) - except (NoSuchPathError, InvalidGitRepositoryError): - return None - except ImportError: - return None - - -def to_git_info(git_ref: Union[GitRef, GitRefDisabled]) -> Optional[GitInfo]: - try: - repo = get_repo_from_git_ref(git_ref) - commit = repo.head.commit - - active_branch = "" - - try: - active_branch = repo.active_branch.name - except TypeError as e: - if str(e.args[0]).startswith("HEAD is a detached symbolic reference as it points to"): - active_branch = "Detached HEAD" - - remote_urls = [remote.url for remote in repo.remotes] - - return GitInfo( - commit_id=commit.hexsha, - message=commit.message, - author_name=commit.author.name, - author_email=commit.author.email, - commit_date=commit.committed_datetime, - dirty=repo.is_dirty(index=False, untracked_files=True), - branch=active_branch, - remotes=remote_urls, - ) - except: # noqa: E722 - return None - - -@dataclass -class UncommittedChanges: - diff_head: Optional[str] - diff_upstream: Optional[str] - upstream_sha: Optional[str] - - -def get_diff(repo: "git.Repo", commit_ref: str) -> Optional[str]: - try: - from git.exc import GitCommandError - - try: - diff = repo.git.diff(commit_ref, index=False) - - # add a newline at the end (required to be a valid `patch` file) - if diff and diff[-1] != "\n": - diff += "\n" - return diff - except GitCommandError: - return - except ImportError: - return None - - -def get_relevant_upstream_commit(repo: "git.Repo") -> Optional["git.Commit"]: - try: - tracking_branch = repo.active_branch.tracking_branch() - except (TypeError, ValueError): - return - - if tracking_branch: - return tracking_branch.commit - - return search_for_most_recent_ancestor(repo) - - -def search_for_most_recent_ancestor(repo: "git.Repo") -> Optional["git.Commit"]: - most_recent_ancestor: Optional["git.Commit"] = None - - try: - from git.exc import GitCommandError - - try: - for branch in repo.heads: - tracking_branch = branch.tracking_branch() - if tracking_branch: - for ancestor in repo.merge_base(repo.head, tracking_branch.commit): - if not most_recent_ancestor or 
repo.is_ancestor(most_recent_ancestor, ancestor): - most_recent_ancestor = ancestor - except GitCommandError: - pass - except ImportError: - return None - - return most_recent_ancestor - - -def get_upstream_index_sha(repo: "git.Repo") -> Optional[str]: - upstream_commit = get_relevant_upstream_commit(repo) - - if upstream_commit and upstream_commit != repo.head.commit: - - return upstream_commit.hexsha - - -def get_uncommitted_changes(repo: Optional["git.Repo"]) -> Optional[UncommittedChanges]: - head_index_diff = get_diff(repo, repo.head.name) - - upstream_sha = get_upstream_index_sha(repo) - - upstream_index_diff = get_diff(repo, upstream_sha) if upstream_sha else None - if head_index_diff or upstream_sha or upstream_index_diff: - return UncommittedChanges(head_index_diff, upstream_index_diff, upstream_sha) - - -def track_uncommitted_changes(git_ref: Union[GitRef, GitRefDisabled], run: "Run") -> None: - repo = get_repo_from_git_ref(git_ref) - - if not repo: - return - - uncommitted_changes = get_uncommitted_changes(repo) - - if not uncommitted_changes: - return - - if uncommitted_changes.diff_head: - run[DIFF_HEAD_INDEX_PATH].upload(File.from_content(uncommitted_changes.diff_head, extension="patch")) - - if uncommitted_changes.diff_upstream: - run[f"{UPSTREAM_INDEX_DIFF}{uncommitted_changes.upstream_sha}"].upload( - File.from_content(uncommitted_changes.diff_upstream, extension="patch") - ) diff --git a/src/neptune/internal/utils/git_info.py b/src/neptune/internal/utils/git_info.py deleted file mode 100644 index b2b8174e0..000000000 --- a/src/neptune/internal/utils/git_info.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -class GitInfo(object): - """Class that keeps information about a git repository in experiment. - - When :meth:`~neptune.projects.Project.create_experiment` is invoked, instance of this class is created to - store information about git repository. - This information is later presented in the experiment details tab in the Neptune web application. - - Args: - commit_id (:obj:`str`): commit id sha. - message (:obj:`str`, optional, default is ``""``): commit message. - author_name (:obj:`str`, optional, default is ``""``): commit author username. - author_email (:obj:`str`, optional, default is ``""``): commit author email. - commit_date (:obj:`datetime.datetime`, optional, default is ``""``): commit datetime. - repository_dirty (:obj:`bool`, optional, default is ``True``): - ``True``, if the repository has uncommitted changes, ``False`` otherwise. 
- """ - - def __init__( - self, - commit_id, - message="", - author_name="", - author_email="", - commit_date="", - repository_dirty=True, - active_branch="", - remote_urls=None, - ): - if remote_urls is None: - remote_urls = [] - if commit_id is None: - raise TypeError("commit_id must not be None") - - self.commit_id = commit_id - self.message = message - self.author_name = author_name - self.author_email = author_email - self.commit_date = commit_date - self.repository_dirty = repository_dirty - self.active_branch = active_branch - self.remote_urls = remote_urls - - def __eq__(self, o): - return o is not None and self.__dict__ == o.__dict__ - - def __ne__(self, o): - return not self.__eq__(o) - - def __str__(self): - return "GitInfo({})".format(self.commit_id) - - def __repr__(self): - return str(self) diff --git a/src/neptune/internal/utils/hashing.py b/src/neptune/internal/utils/hashing.py deleted file mode 100644 index 878389297..000000000 --- a/src/neptune/internal/utils/hashing.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["generate_hash"] - -import hashlib - - -def generate_hash(*descriptors, length: int) -> str: - hasher = hashlib.sha256() - for descriptor in descriptors: - hasher.update(str(descriptor).encode()) - - return hasher.hexdigest()[-length:] diff --git a/src/neptune/internal/utils/images.py b/src/neptune/internal/utils/images.py deleted file mode 100644 index 8657c54da..000000000 --- a/src/neptune/internal/utils/images.py +++ /dev/null @@ -1,349 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
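generate_hash above feeds the string form of every descriptor into a single SHA-256 digest and keeps only the trailing `length` hex characters, which yields short, deterministic identifiers. The function plus a quick check (copied here so the example runs on its own):

import hashlib

def generate_hash(*descriptors, length: int) -> str:
    hasher = hashlib.sha256()
    for descriptor in descriptors:
        hasher.update(str(descriptor).encode())
    return hasher.hexdigest()[-length:]

# Same inputs always give the same short id; any changed descriptor gives a new one.
assert generate_hash("workspace/project", 42, length=8) == generate_hash("workspace/project", 42, length=8)
assert len(generate_hash("a", "b", length=12)) == 12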
-# -from __future__ import annotations - -__all__ = [ - "get_image_content", - "get_html_content", - "get_pickle_content", - "is_numpy_array", - "is_pil_image", - "is_matplotlib_figure", - "is_plotly_figure", - "is_altair_chart", - "is_bokeh_figure", - "is_pandas_dataframe", - "is_seaborn_figure", -] - -import base64 -import io -import pickle -import warnings -from io import ( - BytesIO, - StringIO, -) -from typing import Optional - -import numpy as np -from packaging import version -from pandas import DataFrame - -from neptune.exceptions import PlotlyIncompatibilityException -from neptune.internal.utils.logger import get_logger - -logger = get_logger() -SEABORN_GRID_CLASSES = {"FacetGrid", "PairGrid", "JointGrid"} -ALLOWED_IMG_PIXEL_RANGES = ("[0, 255]", "[0.0, 1.0]") - -try: - from numpy import array as numpy_array - from numpy import ndarray as numpy_ndarray - from numpy import uint8 as numpy_uint8 -except ImportError: - numpy_ndarray = None - numpy_array = None - numpy_uint8 = None - -try: - from PIL.Image import Image as PILImage - from PIL.Image import fromarray as pilimage_fromarray -except ImportError: - PILImage = None - - def pilimage_fromarray(): - pass - - -def get_image_content(image, autoscale=True) -> Optional[bytes]: - content = _image_to_bytes(image, autoscale) - - return content - - -def get_html_content(chart) -> Optional[str]: - content = _to_html(chart) - - return content - - -def get_pickle_content(obj) -> Optional[bytes]: - content = _export_pickle(obj) - - return content - - -def _image_to_bytes(image, autoscale) -> bytes: - if image is None: - raise ValueError("image is None") - - elif is_numpy_array(image): - return _get_numpy_as_image(image, autoscale) - - elif is_pil_image(image): - return _get_pil_image_data(image) - - elif is_matplotlib_figure(image): - return _get_figure_image_data(image) - - elif _is_torch_tensor(image): - return _get_numpy_as_image(image.detach().numpy(), autoscale) - - elif _is_tensorflow_tensor(image): - return _get_numpy_as_image(image.numpy(), autoscale) - - elif is_seaborn_figure(image): - return _get_figure_image_data(image.figure) - - raise TypeError("image is {}".format(type(image))) - - -def _to_html(chart) -> str: - if _is_matplotlib_pyplot(chart): - chart = chart.gcf() - - if is_matplotlib_figure(chart): - try: - chart = _matplotlib_to_plotly(chart) - return _export_plotly_figure(chart) - except ImportError: - logger.warning("Plotly not installed. Logging plot as an image.") - return _image_content_to_html(_get_figure_image_data(chart)) - except UserWarning: - logger.warning( - "Couldn't convert Matplotlib plot to interactive Plotly plot. Logging plot as an image instead." - ) - return _image_content_to_html(_get_figure_image_data(chart)) - - elif is_pandas_dataframe(chart): - return _export_pandas_dataframe_to_html(chart) - - elif is_plotly_figure(chart): - return _export_plotly_figure(chart) - - elif is_altair_chart(chart): - return _export_altair_chart(chart) - - elif is_bokeh_figure(chart): - return _export_bokeh_figure(chart) - - elif is_seaborn_figure(chart): - return _export_seaborn_figure(chart) - - else: - raise ValueError("Currently supported are matplotlib, plotly, altair, bokeh and seaborn figures") - - -def _matplotlib_to_plotly(chart): - import matplotlib - import plotly - - # When Plotly cannot accurately convert a matplotlib plot, it emits a warning. - # Then we want to fallback on logging the plot as an image. - # - # E.g. 
when trying to convert a Seaborn confusion matrix or a hist2d, it emits a UserWarning with message
-    # "Dang! That path collection is out of this world. I totally don't know what to do with it yet!
-    # Plotly can only import path collections linked to 'data' coordinates"
-    #
-    # Plotly is not compatible with the latest matplotlib (3.5.0+)
-    # due to fact that mpl_to_plotly uses deprecated matplotlib functionalities
-    plotly_version = plotly.__version__
-    matplotlib_version = matplotlib.__version__
-    if version.parse(matplotlib_version) >= version.parse("3.3.0") and version.parse(plotly_version) < version.parse(
-        "5.0.0"
-    ):
-        raise PlotlyIncompatibilityException(
-            matplotlib_version,
-            plotly_version,
-            "Downgrade matplotlib to version 3.2, upgrade plotly to 5.0+, or upload the chart as a static "
-            "image: run['chart'].upload(File.as_image(plotly_chart)). "
-            "For details, see https://github.com/plotly/plotly.py/issues/1568.",
-        )
-
-    with warnings.catch_warnings():
-        warnings.filterwarnings(
-            "error",
-            category=UserWarning,
-            message=".*Plotly can only import path collections linked to 'data' coordinates.*",
-        )
-        try:
-            chart = plotly.tools.mpl_to_plotly(chart)
-        except AttributeError as e:
-            if "'PathCollection' object has no attribute 'get_offset_position'" in str(e):
-                raise PlotlyIncompatibilityException(
-                    matplotlib_version,
-                    plotly_version,
-                    "Due to plotly using some deprecated matplotlib methods, we recommend downgrading matplotlib"
-                    " to version 3.4. See https://github.com/plotly/plotly.py/issues/3624 for details.",
-                ) from e
-            raise e
-
-    return chart
-
-
-def _image_content_to_html(content: bytes) -> str:
-    str_equivalent_image = base64.b64encode(content).decode()
-    return "<img src='data:image/png;base64," + str_equivalent_image + "'/>"
-
-
-def _get_numpy_as_image(array: np.ndarray, autoscale: bool) -> bytes:
-    array = array.copy()  # prevent modifying the original array
-    if autoscale:
-        array = _scale_array(array)
-
-    if len(array.shape) == 2:
-        return _get_pil_image_data(pilimage_fromarray(array.astype(numpy_uint8)))
-    if len(array.shape) == 3:
-        if array.shape[2] == 1:
-            array2d = numpy_array([[col[0] for col in row] for row in array])
-            return _get_pil_image_data(pilimage_fromarray(array2d.astype(numpy_uint8)))
-        if array.shape[2] in (3, 4):
-            return _get_pil_image_data(pilimage_fromarray(array.astype(numpy_uint8)))
-    raise ValueError(
-        "Incorrect size of numpy.ndarray. Should be 2-dimensional or "
-        "3-dimensional with 3rd dimension of size 1, 3 or 4."
-    )
-
-
-def _scale_array(array: np.ndarray) -> np.ndarray:
-    array_min = array.min()
-    array_max = array.max()
-
-    if array_min >= 0 and 1 < array_max <= 255:
-        return array
-
-    if array_min >= 0 and array_max <= 1:
-        return array * 255
-
-    _warn_about_incorrect_image_data_range(array_min, array_max)
-    return array
-
-
-def _warn_about_incorrect_image_data_range(array_min: int | float, array_max: int | float) -> None:
-    msg = f"Image data is in range [{array_min}, {array_max}]."
- logger.warning( - "%s To be interpreted as colors correctly values in the array need to be in the %s or %s range.", - msg, - *ALLOWED_IMG_PIXEL_RANGES, - ) - - -def _get_pil_image_data(image: PILImage) -> bytes: - with io.BytesIO() as image_buffer: - image.save(image_buffer, format="PNG") - return image_buffer.getvalue() - - -def _get_figure_image_data(figure) -> bytes: - with io.BytesIO() as image_buffer: - figure.savefig(image_buffer, format="png", bbox_inches="tight") - return image_buffer.getvalue() - - -def _is_torch_tensor(image): - return ( - image.__class__.__module__.startswith("torch") - and image.__class__.__name__ == "Tensor" - and hasattr(image, "numpy") - ) - - -def _is_tensorflow_tensor(image): - return ( - image.__class__.__module__.startswith("tensorflow.") - and "Tensor" in image.__class__.__name__ - and hasattr(image, "numpy") - ) - - -def _is_matplotlib_pyplot(chart): - return chart.__class__.__module__.startswith("matplotlib.pyplot") - - -def is_numpy_array(image) -> bool: - return numpy_ndarray is not None and isinstance(image, numpy_ndarray) - - -def is_pil_image(image) -> bool: - return PILImage is not None and isinstance(image, PILImage) - - -def is_matplotlib_figure(image): - return image.__class__.__module__.startswith("matplotlib.") and image.__class__.__name__ == "Figure" - - -def is_plotly_figure(chart): - return chart.__class__.__module__.startswith("plotly.") and chart.__class__.__name__ == "Figure" - - -def is_altair_chart(chart): - return chart.__class__.__module__.startswith("altair.") and "Chart" in chart.__class__.__name__ - - -def is_bokeh_figure(chart): - return chart.__class__.__module__.startswith("bokeh.") and chart.__class__.__name__.lower() == "figure" - - -def is_seaborn_figure(chart): - return ( - chart.__class__.__module__.startswith("seaborn.axisgrid") and chart.__class__.__name__ in SEABORN_GRID_CLASSES - ) - - -def is_pandas_dataframe(table): - return isinstance(table, DataFrame) - - -def _export_pandas_dataframe_to_html(table): - buffer = StringIO(table.to_html()) - buffer.seek(0) - return buffer.getvalue() - - -def _export_plotly_figure(image): - buffer = StringIO() - image.write_html(buffer) - buffer.seek(0) - return buffer.getvalue() - - -def _export_altair_chart(chart): - buffer = StringIO() - chart.save(buffer, format="html") - buffer.seek(0) - return buffer.getvalue() - - -def _export_bokeh_figure(chart): - from bokeh.embed import file_html - from bokeh.resources import CDN - - html = file_html(chart, CDN) - buffer = StringIO(html) - buffer.seek(0) - return buffer.getvalue() - - -def _export_pickle(obj): - buffer = BytesIO() - pickle.dump(obj, buffer) - buffer.seek(0) - return buffer.getvalue() - - -def _export_seaborn_figure(chart): - return _export_plotly_figure(_matplotlib_to_plotly(chart.figure)) diff --git a/src/neptune/internal/utils/iso_dates.py b/src/neptune/internal/utils/iso_dates.py deleted file mode 100644 index 5dd4f539e..000000000 --- a/src/neptune/internal/utils/iso_dates.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ["parse_iso_date"] - -import datetime -from typing import Union - -DATE_FORMAT_LONG: str = "%Y-%m-%dT%H:%M:%S.%fZ" -DATE_FORMAT_SHORT: str = "%Y-%m-%dT%H:%M:%SZ" - - -def parse_iso_date(date: Union[str, datetime.datetime]) -> datetime.datetime: - if isinstance(date, datetime.datetime): - return date - - date_format = DATE_FORMAT_LONG if _is_long_date_format(date) else DATE_FORMAT_SHORT - - return datetime.datetime.strptime(date, date_format) - - -def _is_long_date_format(date_string: str) -> bool: - return len(date_string.split(".")) == 2 diff --git a/src/neptune/internal/utils/iteration.py b/src/neptune/internal/utils/iteration.py deleted file mode 100644 index cc4378d77..000000000 --- a/src/neptune/internal/utils/iteration.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["get_batches"] - -from itertools import ( - chain, - islice, -) -from typing import ( - Iterable, - List, - TypeVar, -) - -T = TypeVar("T") - - -def get_batches(iterable: Iterable[T], *, batch_size: int) -> Iterable[List[T]]: - assert batch_size > 0 - - source_iter = iter(iterable) - while True: - # return consequent slices of `batch_size` elements - slices = islice(source_iter, batch_size) - try: - first_from_slice = next(slices) - except StopIteration: - # but if there's nothing to return in last slice, close generator instead of returning empty list - return - yield list(chain([first_from_slice], slices)) diff --git a/src/neptune/internal/utils/limits.py b/src/neptune/internal/utils/limits.py deleted file mode 100644 index 294973ade..000000000 --- a/src/neptune/internal/utils/limits.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
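get_batches above slices any iterable into lists of at most batch_size items; pulling the first element of each slice eagerly is what lets it terminate cleanly instead of yielding a trailing empty batch. The function plus a quick check (copied here so the example is runnable on its own):

from itertools import chain, islice
from typing import Iterable, List, TypeVar

T = TypeVar("T")

def get_batches(iterable: Iterable[T], *, batch_size: int) -> Iterable[List[T]]:
    source_iter = iter(iterable)
    while True:
        slices = islice(source_iter, batch_size)
        try:
            first = next(slices)  # empty slice means the source is exhausted
        except StopIteration:
            return
        yield list(chain([first], slices))

assert list(get_batches(range(7), batch_size=3)) == [[0, 1, 2], [3, 4, 5], [6]]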
-# -__all__ = ["custom_run_id_exceeds_length", "image_size_exceeds_limit_for_logging"] - -import warnings - -from neptune.internal.utils.logger import get_logger - -_logger = get_logger() - - -_CUSTOM_RUN_ID_LENGTH = 36 -_LOGGED_IMAGE_SIZE_LIMIT_MB = 32 - -BYTES_IN_MB = 1024 * 1024 - - -def custom_run_id_exceeds_length(custom_run_id): - if custom_run_id and len(custom_run_id) > _CUSTOM_RUN_ID_LENGTH: - _logger.warning( - "Given custom_run_id exceeds %s" " characters and it will be ignored.", - _CUSTOM_RUN_ID_LENGTH, - ) - return True - return False - - -def image_size_exceeds_limit_for_logging(content_size): - if content_size > _LOGGED_IMAGE_SIZE_LIMIT_MB * BYTES_IN_MB: - warnings.warn( - f"You are attempting to log an image that is {content_size / BYTES_IN_MB:.2f}MB large. " - f"Neptune supports logging images smaller than {_LOGGED_IMAGE_SIZE_LIMIT_MB}MB. " - "Resize or increase compression of this image.", - category=UserWarning, - ) - return True - return False diff --git a/src/neptune/internal/utils/logger.py b/src/neptune/internal/utils/logger.py deleted file mode 100644 index 1c5af5427..000000000 --- a/src/neptune/internal/utils/logger.py +++ /dev/null @@ -1,95 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["get_logger", "get_disabled_logger", "NEPTUNE_LOGGER_NAME"] - -import logging -import sys - -NEPTUNE_LOGGER_NAME = "neptune" -NEPTUNE_NO_PREFIX_LOGGER_NAME = "neptune_no_prefix" -NEPTUNE_NOOP_LOGGER_NAME = "neptune_noop" -LOG_FORMAT = "[%(name)s] [%(levelname)s] %(message)s" -NO_PREFIX_FORMAT = "%(message)s" - - -class CustomFormatter(logging.Formatter): - def format(self, record): - record.levelname = record.levelname.lower().ljust(len("warning")) - formatter = logging.Formatter(LOG_FORMAT) - return formatter.format(record) - - -class GrabbableStdoutHandler(logging.StreamHandler): - """ - This class is like a StreamHandler using sys.stdout, but always uses - whatever sys.stdout is currently set to rather than the value of - sys.stderr at handler construction time. - This enables Neptune to capture stdout regardless - of logging configuration time. - Based on logging._StderrHandler from standard library. 
- """ - - def __init__(self, level=logging.NOTSET): - logging.Handler.__init__(self, level) - - @property - def stream(self): - return sys.stdout - - -def get_logger(with_prefix: bool = True) -> logging.Logger: - name = NEPTUNE_LOGGER_NAME if with_prefix else NEPTUNE_NO_PREFIX_LOGGER_NAME - - return logging.getLogger(name) - - -def get_disabled_logger() -> logging.Logger: - return logging.getLogger(NEPTUNE_NOOP_LOGGER_NAME) - - -def _set_up_logging(): - # setup neptune logger so that logging.getLogger(NEPTUNE_LOGGER_NAME) - # returns configured logger - neptune_logger = logging.getLogger(NEPTUNE_LOGGER_NAME) - neptune_logger.propagate = False - - stdout_handler = GrabbableStdoutHandler() - stdout_handler.setFormatter(CustomFormatter()) - neptune_logger.addHandler(stdout_handler) - - neptune_logger.setLevel(logging.INFO) - - -def _set_up_no_prefix_logging(): - neptune_logger = logging.getLogger(NEPTUNE_NO_PREFIX_LOGGER_NAME) - neptune_logger.propagate = False - - stdout_handler = GrabbableStdoutHandler() - stdout_handler.setFormatter(logging.Formatter(NO_PREFIX_FORMAT)) - neptune_logger.addHandler(stdout_handler) - - neptune_logger.setLevel(logging.INFO) - - -def _set_up_disabled_logging(): - neptune_logger = logging.getLogger(NEPTUNE_NOOP_LOGGER_NAME) - - neptune_logger.setLevel(logging.CRITICAL) - - -_set_up_logging() -_set_up_no_prefix_logging() -_set_up_disabled_logging() diff --git a/src/neptune/internal/utils/paths.py b/src/neptune/internal/utils/paths.py deleted file mode 100644 index ebc7c6b25..000000000 --- a/src/neptune/internal/utils/paths.py +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["parse_path", "path_to_str", "join_paths"] - -from typing import List - - -def _remove_empty_paths(paths: List[str]) -> List[str]: - return list(filter(bool, paths)) - - -def parse_path(path: str) -> List[str]: - return _remove_empty_paths(str(path).split("/")) - - -def path_to_str(path: List[str]) -> str: - return "/".join(_remove_empty_paths(path)) - - -def join_paths(*paths: str) -> str: - return "/".join(_remove_empty_paths([str(path) for path in paths])) diff --git a/src/neptune/internal/utils/patterns.py b/src/neptune/internal/utils/patterns.py deleted file mode 100644 index d3edfeccf..000000000 --- a/src/neptune/internal/utils/patterns.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
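GrabbableStdoutHandler above resolves sys.stdout through a property on every emit instead of freezing the stream at construction time, which is what lets Neptune's logger keep working after the capture proxies earlier in this patch swap stdout out. The effect, reproduced with an illustrative handler:

import io
import logging
import sys

class LateBoundStdoutHandler(logging.StreamHandler):
    """Like GrabbableStdoutHandler: looks up sys.stdout on every emit."""

    def __init__(self, level=logging.NOTSET):
        logging.Handler.__init__(self, level)  # deliberately skip StreamHandler.__init__

    @property
    def stream(self):
        return sys.stdout

logger = logging.getLogger("demo")
logger.propagate = False
logger.addHandler(LateBoundStdoutHandler())
logger.warning("goes to current sys.stdout")

sys.stdout = io.StringIO()        # redirect after handler construction
logger.warning("captured")        # lands in the StringIO, not the old stdout
captured = sys.stdout.getvalue()
sys.stdout = sys.__stdout__
assert "captured" in captured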
-#
-PROJECT_QUALIFIED_NAME_PATTERN = "^((?P<organization>[^/]+)/){0,1}(?P<project>[^/]+)$"
-
-__all__ = ["PROJECT_QUALIFIED_NAME_PATTERN"]
diff --git a/src/neptune/internal/utils/ping_background_job.py b/src/neptune/internal/utils/ping_background_job.py
deleted file mode 100644
index ff3304701..000000000
--- a/src/neptune/internal/utils/ping_background_job.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-__all__ = ["PingBackgroundJob"]
-
-from typing import (
-    TYPE_CHECKING,
-    Optional,
-)
-
-from neptune.internal.background_job import BackgroundJob
-from neptune.internal.threading.daemon import Daemon
-from neptune.internal.utils.logger import get_logger
-
-if TYPE_CHECKING:
-    from neptune.objects import NeptuneObject
-
-_logger = get_logger()
-
-
-class PingBackgroundJob(BackgroundJob):
-    def __init__(self, period: float = 10):
-        self._period = period
-        self._thread: PingBackgroundJob.ReportingThread = None
-        self._started = False
-
-    def start(self, container: "NeptuneObject"):
-        self._thread = self.ReportingThread(self._period, container)
-        self._thread.start()
-        self._started = True
-
-    def stop(self):
-        if not self._started:
-            return
-        self._thread.interrupt()
-
-    def pause(self):
-        self._thread.pause()
-
-    def resume(self):
-        self._thread.resume()
-
-    def join(self, seconds: Optional[float] = None):
-        if not self._started:
-            return
-        self._thread.join(seconds)
-
-    class ReportingThread(Daemon):
-        def __init__(self, period: float, container: "NeptuneObject"):
-            super().__init__(sleep_time=period, name="NeptunePing")
-            self._container = container
-
-        @Daemon.ConnectionRetryWrapper(
-            kill_message=(
-                "Killing Neptune ping thread. Your run's status will not be updated and"
-                " the run will be shown as inactive."
-            )
-        )
-        def work(self) -> None:
-            self._container.ping()
diff --git a/src/neptune/internal/utils/process_killer.py b/src/neptune/internal/utils/process_killer.py
deleted file mode 100644
index be5b15907..000000000
--- a/src/neptune/internal/utils/process_killer.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
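PROJECT_QUALIFIED_NAME_PATTERN above accepts either a qualified "workspace/project" name or a bare project name, exposing both parts as named groups (the group names were garbled in this patch's transcription and are reconstructed as organization/project). Quick check of the two accepted forms:

import re

pattern = re.compile("^((?P<organization>[^/]+)/){0,1}(?P<project>[^/]+)$")

m = pattern.match("my-workspace/my-project")
assert m.group("organization") == "my-workspace"
assert m.group("project") == "my-project"

m = pattern.match("my-project")           # the organization prefix is optional
assert m.group("organization") is None
assert m.group("project") == "my-project"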
-# -__all__ = ["kill_me"] - -import os -import signal - -from neptune.envs import NEPTUNE_SUBPROCESS_KILL_TIMEOUT - -try: - import psutil - - PSUTIL_INSTALLED = True -except ImportError: - PSUTIL_INSTALLED = False - - -KILL_TIMEOUT = int(os.getenv(NEPTUNE_SUBPROCESS_KILL_TIMEOUT, "5")) - - -def kill_me(): - if PSUTIL_INSTALLED: - process = psutil.Process(os.getpid()) - try: - children = _get_process_children(process) - except psutil.NoSuchProcess: - children = [] - - for child_proc in children: - _terminate(child_proc) - _, alive = psutil.wait_procs(children, timeout=KILL_TIMEOUT) - for child_proc in alive: - _kill(child_proc) - # finish with terminating self - _terminate(process) - else: - os.kill(os.getpid(), signal.SIGINT) - - -def _terminate(process): - try: - process.terminate() - except psutil.NoSuchProcess: - pass - - -def _kill(process): - try: - if process.is_running(): - process.kill() - except psutil.NoSuchProcess: - pass - - -def _get_process_children(process): - try: - return process.children(recursive=True) - except psutil.NoSuchProcess: - return [] diff --git a/src/neptune/internal/utils/requirement_check.py b/src/neptune/internal/utils/requirement_check.py deleted file mode 100644 index a1bacadde..000000000 --- a/src/neptune/internal/utils/requirement_check.py +++ /dev/null @@ -1,37 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "require_installed", - "is_installed", -] - -from functools import lru_cache -from importlib.util import find_spec -from typing import Optional - -from neptune.exceptions import NeptuneMissingRequirementException - - -@lru_cache(maxsize=32) -def is_installed(requirement_name: str) -> bool: - return find_spec(requirement_name) is not None - - -def require_installed(requirement_name: str, *, suggestion: Optional[str] = None) -> None: - if is_installed(requirement_name): - return - - raise NeptuneMissingRequirementException(requirement_name, suggestion) diff --git a/src/neptune/internal/utils/run_state.py b/src/neptune/internal/utils/run_state.py deleted file mode 100644 index f4768d974..000000000 --- a/src/neptune/internal/utils/run_state.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["RunState"] - -import enum - -from neptune.internal.exceptions import NeptuneException - - -class RunState(enum.Enum): - active = "Active" - inactive = "Inactive" - - _api_active = "running" - _api_inactive = "idle" - - @classmethod - def from_string(cls, value: str) -> "RunState": - try: - return cls(value.capitalize()) - except ValueError as e: - raise NeptuneException(f"Can't map RunState to API: {value}") from e - - @staticmethod - def from_api(value: str) -> "RunState": - if value == RunState._api_active.value: - return RunState.active - elif value == RunState._api_inactive.value: - return RunState.inactive - else: - raise NeptuneException(f"Unknown RunState: {value}") - - def to_api(self) -> str: - if self is RunState.active: - return self._api_active.value - if self is RunState.inactive: - return self._api_inactive.value diff --git a/src/neptune/internal/utils/runningmode.py b/src/neptune/internal/utils/runningmode.py deleted file mode 100644 index 0359a06fc..000000000 --- a/src/neptune/internal/utils/runningmode.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["in_interactive", "in_notebook"] - -import sys - - -def in_interactive() -> bool: - """Based on: https://stackoverflow.com/a/2356427/1565454""" - return hasattr(sys, "ps1") - - -def in_notebook() -> bool: - """Based on: https://stackoverflow.com/a/22424821/1565454""" - try: - from IPython import get_ipython - - ipy = get_ipython() - return ( - ipy is not None and hasattr(ipy, "config") and isinstance(ipy.config, dict) and "IPKernelApp" in ipy.config - ) - except ImportError: - return False diff --git a/src/neptune/internal/utils/s3.py b/src/neptune/internal/utils/s3.py deleted file mode 100644 index 02293fe36..000000000 --- a/src/neptune/internal/utils/s3.py +++ /dev/null @@ -1,39 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["get_boto_s3_client"] - -import os - -from neptune.envs import S3_ENDPOINT_URL - - -def get_boto_s3_client(): - """ - User might want to use other than `AWS` `S3` providers, so we should be able to override `endpoint_url`. - Unfortunately `boto3` doesn't support this parameter in configuration, so we'll have to create our env variable. 
- boto3 supported config envs: - * https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#using-environment-variables - boto3 `endpoint_url` support PR: - * https://github.com/boto/boto3/pull/2746 - """ - endpoint_url = os.getenv(S3_ENDPOINT_URL) - - import boto3 - - return boto3.resource( - service_name="s3", - endpoint_url=endpoint_url, - ) diff --git a/src/neptune/internal/utils/source_code.py b/src/neptune/internal/utils/source_code.py deleted file mode 100644 index 5c03c2352..000000000 --- a/src/neptune/internal/utils/source_code.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["upload_source_code"] - -import os -from typing import ( - TYPE_CHECKING, - List, - Optional, -) - -from neptune.attributes import constants as attr_consts -from neptune.internal.storage import normalize_file_name -from neptune.internal.utils import ( - does_paths_share_common_drive, - get_absolute_paths, - get_common_root, -) -from neptune.internal.utils.utils import is_ipython -from neptune.vendor.lib_programname import ( - empty_path, - get_path_executed_script, -) - -if TYPE_CHECKING: - from neptune import Run - - -def upload_source_code(source_files: Optional[List[str]], run: "Run") -> None: - entrypoint_filepath = get_path_executed_script() - - if not is_ipython() and entrypoint_filepath != empty_path and os.path.isfile(entrypoint_filepath): - if source_files is None: - entrypoint = os.path.basename(entrypoint_filepath) - source_files = str(entrypoint_filepath) - elif not source_files: - entrypoint = os.path.basename(entrypoint_filepath) - else: - common_root = get_common_root(get_absolute_paths(source_files)) - entrypoint_filepath = os.path.abspath(entrypoint_filepath) - - if common_root is not None and does_paths_share_common_drive([common_root, entrypoint_filepath]): - entrypoint_filepath = normalize_file_name(os.path.relpath(path=entrypoint_filepath, start=common_root)) - - entrypoint = normalize_file_name(entrypoint_filepath) - - run[attr_consts.SOURCE_CODE_ENTRYPOINT_ATTRIBUTE_PATH] = entrypoint - - if source_files is not None: - run[attr_consts.SOURCE_CODE_FILES_ATTRIBUTE_PATH].upload_files(source_files) diff --git a/src/neptune/internal/utils/traceback_job.py b/src/neptune/internal/utils/traceback_job.py deleted file mode 100644 index 217b6f2bf..000000000 --- a/src/neptune/internal/utils/traceback_job.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
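For context on the removed get_boto_s3_client above: because boto3 reads no endpoint override from its own configuration, Neptune used its own environment variable. A hedged sketch of pointing the client at an S3-compatible, non-AWS endpoint; the URL is illustrative:

    >>> import os
    >>> from neptune.envs import S3_ENDPOINT_URL
    >>> os.environ[S3_ENDPOINT_URL] = "https://minio.example.com:9000"
    >>> s3 = get_boto_s3_client()  # boto3.resource("s3", endpoint_url=...)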
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["TracebackJob"] - -import uuid -from typing import ( - TYPE_CHECKING, - List, - Optional, -) - -from neptune.attributes.constants import SYSTEM_FAILED_ATTRIBUTE_PATH -from neptune.internal.background_job import BackgroundJob -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.uncaught_exception_handler import instance as traceback_handler - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - -_logger = get_logger() - - -class TracebackJob(BackgroundJob): - def __init__(self, path: str, fail_on_exception: bool = True): - self._uuid = uuid.uuid4() - self._started = False - self._path = path - self._fail_on_exception = fail_on_exception - - def start(self, container: "NeptuneObject"): - if not self._started: - path = self._path - fail_on_exception = self._fail_on_exception - - def log_traceback(stacktrace_lines: List[str]): - container[path].log(stacktrace_lines) - if fail_on_exception: - container[SYSTEM_FAILED_ATTRIBUTE_PATH] = True - - traceback_handler.register(self._uuid, log_traceback) - self._started = True - - def stop(self): - traceback_handler.unregister(self._uuid) - - def join(self, seconds: Optional[float] = None): - pass - - def pause(self): - pass - - def resume(self): - pass diff --git a/src/neptune/internal/utils/uncaught_exception_handler.py b/src/neptune/internal/utils/uncaught_exception_handler.py deleted file mode 100644 index 98a6ca3da..000000000 --- a/src/neptune/internal/utils/uncaught_exception_handler.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
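The TracebackJob removed above hooks a container into the global uncaught-exception handler, logging formatted traceback lines and optionally marking the run as failed. A minimal lifecycle sketch; the run object and the path argument are assumed:

    >>> job = TracebackJob(path="monitoring/traceback", fail_on_exception=True)
    >>> job.start(run)  # registers a per-job callback with the handler
    >>> job.stop()      # unregisters it again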
-# -__all__ = ["instance"] - -import sys -import threading -import traceback -import uuid -from platform import node as get_hostname -from typing import ( - TYPE_CHECKING, - Callable, - List, -) - -from neptune.internal.utils.logger import get_logger - -if TYPE_CHECKING: - pass - -_logger = get_logger() - - -class UncaughtExceptionHandler: - def __init__(self): - self._previous_uncaught_exception_handler = None - self._handlers = dict() - self._lock = threading.Lock() - - def activate(self): - with self._lock: - this = self - - def exception_handler(exc_type, exc_val, exc_tb): - header_lines = [ - f"An uncaught exception occurred while run was active on worker {get_hostname()}.", - "Marking run as failed", - "Traceback:", - ] - - traceback_lines = header_lines + traceback.format_tb(exc_tb) + str(exc_val).split("\n") - for _, handler in self._handlers.items(): - handler(traceback_lines) - - this._previous_uncaught_exception_handler(exc_type, exc_val, exc_tb) - - if self._previous_uncaught_exception_handler is None: - self._previous_uncaught_exception_handler = sys.excepthook - sys.excepthook = exception_handler - - def deactivate(self): - with self._lock: - sys.excepthook = self._previous_uncaught_exception_handler - self._previous_uncaught_exception_handler = None - - def register(self, uid: uuid.UUID, handler: Callable[[List[str]], None]): - with self._lock: - self._handlers[uid] = handler - - def unregister(self, uid: uuid.UUID): - with self._lock: - if uid in self._handlers: - del self._handlers[uid] - - -instance = UncaughtExceptionHandler() diff --git a/src/neptune/internal/utils/utils.py b/src/neptune/internal/utils/utils.py deleted file mode 100644 index 38d3515fc..000000000 --- a/src/neptune/internal/utils/utils.py +++ /dev/null @@ -1,270 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import functools -import glob as globlib -import math -import os -import re -import ssl -import sys - -import numpy as np -import pandas as pd - -from neptune.internal import envs -from neptune.internal.exceptions import ( - FileNotFound, - InvalidNotebookPath, - NeptuneIncorrectProjectQualifiedNameException, - NeptuneMissingProjectQualifiedNameException, - NotADirectory, - NotAFile, -) -from neptune.internal.utils.git_info import GitInfo -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.patterns import PROJECT_QUALIFIED_NAME_PATTERN - -_logger = get_logger() - -IS_WINDOWS = sys.platform == "win32" -IS_MACOS = sys.platform == "darwin" - - -def reset_internal_ssl_state(): - """ - OpenSSL's internal random number generator does not properly handle forked processes. - Applications must change the PRNG state of the parent process if they use any SSL feature with os.fork(). - Any successful call of RAND_add(), RAND_bytes() or RAND_pseudo_bytes() is sufficient. 
- https://docs.python.org/3/library/ssl.html#multi-processing - """ - ssl.RAND_bytes(100) - - -def map_values(f_value, dictionary): - return dict((k, f_value(v)) for k, v in dictionary.items()) - - -def map_keys(f_key, dictionary): - return dict((f_key(k), v) for k, v in dictionary.items()) - - -def as_list(value): - if value is None or isinstance(value, list): - return value - else: - return [value] - - -def validate_notebook_path(path): - if not path.endswith(".ipynb"): - raise InvalidNotebookPath(path) - - if not os.path.exists(path): - raise FileNotFound(path) - - if not os.path.isfile(path): - raise NotAFile(path) - - -def assure_directory_exists(destination_dir): - """Checks if `destination_dir` DIRECTORY exists, or creates one""" - if not destination_dir: - destination_dir = os.getcwd() - - if not os.path.exists(destination_dir): - os.makedirs(destination_dir) - elif not os.path.isdir(destination_dir): - raise NotADirectory(destination_dir) - - return destination_dir - - -def align_channels_on_x(dataframe): - channel_dfs, common_x = _split_df_by_stems(dataframe) - return merge_dataframes([common_x] + channel_dfs, on="x", how="outer") - - -def get_channel_name_stems(columns): - return list(set([col[2:] for col in columns])) - - -def merge_dataframes(dataframes, on, how="outer"): - merged_df = functools.reduce(lambda left, right: pd.merge(left, right, on=on, how=how), dataframes) - return merged_df - - -def is_float(value): - try: - _ = float(value) - except ValueError: - return False - else: - return True - - -def is_nan_or_inf(value): - return math.isnan(value) or math.isinf(value) - - -def is_notebook(): - try: - get_ipython # noqa: F821 - return True - except Exception: - return False - - -def _split_df_by_stems(df): - channel_dfs, x_vals = [], [] - for stem in get_channel_name_stems(df.columns): - channel_df = df[["x_{}".format(stem), "y_{}".format(stem)]] - channel_df.columns = ["x", stem] - channel_df = channel_df.dropna() - channel_dfs.append(channel_df) - x_vals.extend(channel_df["x"].tolist()) - common_x = pd.DataFrame({"x": np.unique(x_vals)}, dtype=float) - return channel_dfs, common_x - - -def discover_git_repo_location(): - import __main__ - - if hasattr(__main__, "__file__"): - return os.path.dirname(os.path.abspath(__main__.__file__)) - return None - - -def update_session_proxies(session, proxies): - if proxies is not None: - try: - session.proxies.update(proxies) - except (TypeError, ValueError): - raise ValueError("Wrong proxies format: {}".format(proxies)) - - -def get_git_info(repo_path=None): - """Retrieve information about git repository. - - If the attempt fails, ``None`` will be returned. - - Args: - repo_path (:obj:`str`, optional, default is ``None``): - - | Path to the repository from which extract information about git. - | If ``None`` is passed, calling ``get_git_info`` is equivalent to calling - ``git.Repo(search_parent_directories=True)``. - Check `GitPython `_ - docs for more information. - - Returns: - :class:`~neptune.git_info.GitInfo` - An object representing information about git repository. - - Examples: - - .. 
code:: python3 - - # Get git info from the current directory - git_info = get_git_info('.') - - """ - try: - import git - - repo = git.Repo(repo_path, search_parent_directories=True) - - commit = repo.head.commit - - active_branch = "" - - try: - active_branch = repo.active_branch.name - except TypeError as e: - if str(e.args[0]).startswith("HEAD is a detached symbolic reference as it points to"): - active_branch = "Detached HEAD" - - remote_urls = [remote.url for remote in repo.remotes] - - return GitInfo( - commit_id=commit.hexsha, - message=commit.message, - author_name=commit.author.name, - author_email=commit.author.email, - commit_date=commit.committed_datetime, - repository_dirty=repo.is_dirty(index=False, untracked_files=True), - active_branch=active_branch, - remote_urls=remote_urls, - ) - except: # noqa: E722 - return None - - -def file_contains(filename, text): - with open(filename) as f: - for line in f: - if text in line: - return True - return False - - -def in_docker(): - cgroup_file = "/proc/self/cgroup" - return os.path.exists("/.dockerenv") or (os.path.exists(cgroup_file) and file_contains(cgroup_file, text="docker")) - - -def is_ipython(): - try: - import IPython - - ipython = IPython.core.getipython.get_ipython() - return ipython is not None - except ImportError: - return False - - -def glob(pathname): - if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 5): - return globlib.glob(pathname) - else: - return globlib.glob(pathname, recursive=True) - - -def assure_project_qualified_name(project_qualified_name): - project_qualified_name = project_qualified_name or os.getenv(envs.PROJECT_ENV_NAME) - - if not project_qualified_name: - raise NeptuneMissingProjectQualifiedNameException() - if not re.match(PROJECT_QUALIFIED_NAME_PATTERN, project_qualified_name): - raise NeptuneIncorrectProjectQualifiedNameException(project_qualified_name) - - return project_qualified_name - - -class NoopObject(object): - def __getattr__(self, name): - return self - - def __getitem__(self, key): - return self - - def __call__(self, *args, **kwargs): - return self - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass diff --git a/src/neptune/internal/value_to_attribute_visitor.py b/src/neptune/internal/value_to_attribute_visitor.py deleted file mode 100644 index 236469128..000000000 --- a/src/neptune/internal/value_to_attribute_visitor.py +++ /dev/null @@ -1,109 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
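The NoopObject removed above is an absorbing stub: attribute access, item access, calls, and context-manager use all return the same object, so client code written against the real API keeps running when logging is disabled. A quick illustration:

    >>> noop = NoopObject()
    >>> noop["params/lr"].log(0.01) is noop  # every chain collapses back to the stub
    True
    >>> with NoopObject() as n:
    ...     _ = n.anything.goes()  # still a no-op inside a context manager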
-# -__all__ = ["ValueToAttributeVisitor"] - -from typing import ( - TYPE_CHECKING, - List, - Type, -) - -from neptune.attributes.atoms.artifact import Artifact as ArtifactAttr -from neptune.attributes.atoms.boolean import Boolean as BooleanAttr -from neptune.attributes.atoms.datetime import Datetime as DatetimeAttr -from neptune.attributes.atoms.file import File as FileAttr -from neptune.attributes.atoms.float import Float as FloatAttr -from neptune.attributes.atoms.integer import Integer as IntegerAttr -from neptune.attributes.atoms.string import String as StringAttr -from neptune.attributes.attribute import Attribute -from neptune.attributes.file_set import FileSet as FileSetAttr -from neptune.attributes.namespace import Namespace as NamespaceAttr -from neptune.attributes.series.file_series import FileSeries as ImageSeriesAttr -from neptune.attributes.series.float_series import FloatSeries as FloatSeriesAttr -from neptune.attributes.series.string_series import StringSeries as StringSeriesAttr -from neptune.attributes.sets.string_set import StringSet as StringSetAttr -from neptune.exceptions import OperationNotSupported -from neptune.types import ( - Boolean, - Integer, -) -from neptune.types.atoms import GitRef -from neptune.types.atoms.artifact import Artifact -from neptune.types.atoms.datetime import Datetime -from neptune.types.atoms.file import File -from neptune.types.atoms.float import Float -from neptune.types.atoms.string import String -from neptune.types.file_set import FileSet -from neptune.types.namespace import Namespace -from neptune.types.series.file_series import FileSeries -from neptune.types.series.float_series import FloatSeries -from neptune.types.series.string_series import StringSeries -from neptune.types.sets.string_set import StringSet -from neptune.types.value_visitor import ValueVisitor - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - - -class ValueToAttributeVisitor(ValueVisitor[Attribute]): - def __init__(self, container: "NeptuneObject", path: List[str]): - self._container = container - self._path = path - - def visit_float(self, _: Float) -> Attribute: - return FloatAttr(self._container, self._path) - - def visit_integer(self, _: Integer) -> Attribute: - return IntegerAttr(self._container, self._path) - - def visit_boolean(self, _: Boolean) -> Attribute: - return BooleanAttr(self._container, self._path) - - def visit_string(self, _: String) -> Attribute: - return StringAttr(self._container, self._path) - - def visit_datetime(self, _: Datetime) -> Attribute: - return DatetimeAttr(self._container, self._path) - - def visit_artifact(self, _: Artifact) -> Attribute: - return ArtifactAttr(self._container, self._path) - - def visit_file(self, _: File) -> Attribute: - return FileAttr(self._container, self._path) - - def visit_file_set(self, _: FileSet) -> Attribute: - return FileSetAttr(self._container, self._path) - - def visit_float_series(self, _: FloatSeries) -> Attribute: - return FloatSeriesAttr(self._container, self._path) - - def visit_string_series(self, _: StringSeries) -> Attribute: - return StringSeriesAttr(self._container, self._path) - - def visit_image_series(self, _: FileSeries) -> Attribute: - return ImageSeriesAttr(self._container, self._path) - - def visit_string_set(self, _: StringSet) -> Attribute: - return StringSetAttr(self._container, self._path) - - def visit_git_ref(self, _: GitRef) -> Attribute: - raise OperationNotSupported("Cannot create custom attribute of type GitRef") - - def visit_namespace(self, _: Namespace) -> 
Attribute: - return NamespaceAttr(self._container, self._path) - - def copy_value(self, source_type: Type[Attribute], source_path: List[str]) -> Attribute: - return source_type(self._container, self._path) diff --git a/src/neptune/internal/warnings.py b/src/neptune/internal/warnings.py deleted file mode 100644 index 4d7712a89..000000000 --- a/src/neptune/internal/warnings.py +++ /dev/null @@ -1,108 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "warn_once", - "warn_about_unsupported_type", - "NeptuneDeprecationWarning", - "NeptuneWarning", - "NeptuneUnsupportedType", - "NeptuneUnsupportedValue", -] - -import os -import traceback -import warnings -from typing import Optional - -import neptune -from neptune.internal.utils.logger import NEPTUNE_LOGGER_NAME -from neptune.internal.utils.runningmode import in_interactive - -DEFAULT_FORMAT = "[%(name)s] [warning] %(filename)s:%(lineno)d: %(category)s: %(message)s\n" -INTERACTIVE_FORMAT = "[%(name)s] [warning] %(category)s: %(message)s\n" - - -class NeptuneDeprecationWarning(DeprecationWarning): - pass - - -class NeptuneUnsupportedValue(Warning): - pass - - -class NeptuneWarning(Warning): - pass - - -class NeptuneUnsupportedType(Warning): - pass - - -warnings.simplefilter("always", category=NeptuneDeprecationWarning) - -MAX_WARNED_ONCE_CAPACITY = 1_000 -warned_once = set() -path_to_root_module = os.path.dirname(os.path.realpath(neptune.__file__)) - - -def get_user_code_stack_level(): - call_stack = traceback.extract_stack() - for level, stack_frame in enumerate(reversed(call_stack)): - if path_to_root_module not in stack_frame.filename: - return level - return 2 - - -def format_message(message, category, filename, lineno, line=None) -> str: - variables = { - "message": message, - "category": category.__name__, - "filename": filename, - "lineno": lineno, - "name": NEPTUNE_LOGGER_NAME, - } - - message_format = INTERACTIVE_FORMAT if in_interactive() else DEFAULT_FORMAT - - return message_format % variables - - -def warn_once(message: str, *, exception: Optional[type] = None): - if len(warned_once) < MAX_WARNED_ONCE_CAPACITY: - if exception is None: - exception = NeptuneDeprecationWarning - - message_hash = hash(message) - - if message_hash not in warned_once: - old_formatting = warnings.formatwarning - warnings.formatwarning = format_message - warnings.warn( - message=message, - category=exception, - stacklevel=get_user_code_stack_level(), - ) - warnings.formatwarning = old_formatting - warned_once.add(message_hash) - - -def warn_about_unsupported_type(type_str: str): - warn_once( - message=f"""You're attempting to log a type that is not directly supported by Neptune ({type_str}). - Convert the value to a supported type, such as a string or float, or use stringify_unsupported(obj) - for dictionaries or collections that contain unsupported values.
- For more, see https://docs.neptune.ai/help/value_of_unsupported_type""", - exception=NeptuneUnsupportedType, - ) diff --git a/src/neptune/internal/websockets/__init__.py b/src/neptune/internal/websockets/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/src/neptune/internal/websockets/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/internal/websockets/reconnecting_websocket.py b/src/neptune/internal/websockets/reconnecting_websocket.py deleted file mode 100644 index 0079cbd3e..000000000 --- a/src/neptune/internal/websockets/reconnecting_websocket.py +++ /dev/null @@ -1,114 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
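The warn_once helper removed above deduplicates warnings by message hash (capped at 1,000 remembered messages) and temporarily swaps in Neptune's own warning format. Repeated calls with the same text therefore emit a single warning; the message below is illustrative:

    >>> warn_once("neptune-tensorboard is deprecated")  # emits a NeptuneDeprecationWarning
    >>> warn_once("neptune-tensorboard is deprecated")  # no-op: hash already recorded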
-# - -import random - -from websocket import ( - WebSocketConnectionClosedException, - WebSocketTimeoutException, -) - -from neptune.internal.websockets.websocket_client_adapter import ( - WebsocketClientAdapter, - WebsocketNotConnectedException, -) - - -class ReconnectingWebsocket(object): - def __init__(self, url, oauth2_session, shutdown_event, proxies=None): - self.url = url - self.client = WebsocketClientAdapter() - self._shutdown_event = shutdown_event - self._oauth2_session = oauth2_session - self._reconnect_counter = ReconnectCounter() - self._token = oauth2_session.token - self._proxies = proxies - - def shutdown(self): - self._shutdown_event.set() - self.client.close() - self.client.abort() - self.client.shutdown() - - def recv(self): - if not self.client.connected: - self._try_to_establish_connection() - while self._is_active(): - try: - data = self.client.recv() - self._on_successful_connect() - return data - except WebSocketTimeoutException: - raise - except WebSocketConnectionClosedException: - if self._is_active(): - self._handle_lost_connection() - else: - raise - except WebsocketNotConnectedException: - if self._is_active(): - self._handle_lost_connection() - except Exception: - if self._is_active(): - self._handle_lost_connection() - - def _is_active(self): - return not self._shutdown_event.is_set() - - def _on_successful_connect(self): - self._reconnect_counter.clear() - - def _try_to_establish_connection(self): - try: - self._request_token_refresh() - if self.client.connected: - self.client.shutdown() - self.client.connect(url=self.url, token=self._token, proxies=self._proxies) - except Exception: - self._shutdown_event.wait(self._reconnect_counter.calculate_delay()) - - def _handle_lost_connection(self): - self._reconnect_counter.increment() - self._try_to_establish_connection() - - def _request_token_refresh(self): - self._token = self._oauth2_session.refresh_token(token_url=self._oauth2_session.auto_refresh_url) - - -class ReconnectCounter(object): - MAX_RETRY_DELAY = 128 - - def __init__(self): - self.retries = 0 - - def clear(self): - self.retries = 0 - - def increment(self): - self.retries += 1 - - def calculate_delay(self): - return self._compute_delay(self.retries, self.MAX_RETRY_DELAY) - - @classmethod - def _compute_delay(cls, attempt, max_delay): - delay = cls._full_jitter_delay(attempt, max_delay) - return delay - - @classmethod - def _full_jitter_delay(cls, attempt, cap): - exp = min(2 ** (attempt - 1), cap) - return random.uniform(0, exp) diff --git a/src/neptune/internal/websockets/websocket_client_adapter.py b/src/neptune/internal/websockets/websocket_client_adapter.py deleted file mode 100644 index d6dd0a5d0..000000000 --- a/src/neptune/internal/websockets/websocket_client_adapter.py +++ /dev/null @@ -1,89 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
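The ReconnectCounter removed above implements capped exponential backoff with full jitter: for the n-th consecutive failure the delay is drawn uniformly from [0, min(2**(n-1), 128)] seconds, so reconnecting clients spread out instead of retrying in lockstep. For example:

    >>> counter = ReconnectCounter()
    >>> counter.increment()                 # first failure
    >>> 0 <= counter.calculate_delay() <= 1
    True
    >>> for _ in range(10):                 # from the 8th attempt on, the 128 s cap applies
    ...     counter.increment()
    >>> 0 <= counter.calculate_delay() <= 128
    True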
-# - -import os -import ssl -import urllib.parse - -from websocket import ( - ABNF, - create_connection, -) - - -class WebsocketClientAdapter(object): - def __init__(self): - self._ws_client = None - - def connect(self, url, token, proxies=None): - sslopt = None - if os.getenv("NEPTUNE_ALLOW_SELF_SIGNED_CERTIFICATE"): - sslopt = {"cert_reqs": ssl.CERT_NONE} - - proto = url.split(":")[0].replace("ws", "http") - proxy = proxies[proto] if proxies and proto in proxies else os.getenv("{}_PROXY".format(proto.upper())) - - if proxy: - proxy_split = urllib.parse.urlparse(proxy).netloc.split(":") - proxy_host = proxy_split[0] - proxy_port = proxy_split[1] if len(proxy_split) > 1 else "80" if proto == "http" else "443" - else: - proxy_host = None - proxy_port = None - - self._ws_client = create_connection( - url, - header=self._auth_header(token), - sslopt=sslopt, - http_proxy_host=proxy_host, - http_proxy_port=proxy_port, - ) - - def recv(self): - if self._ws_client is None: - raise WebsocketNotConnectedException() - - opcode, data = None, None - - while opcode != ABNF.OPCODE_TEXT: - opcode, data = self._ws_client.recv_data() - - return data.decode("utf-8") - - @property - def connected(self): - return self._ws_client and self._ws_client.connected - - def close(self): - if self._ws_client: - return self._ws_client.close() - - def abort(self): - if self._ws_client: - return self._ws_client.abort() - - def shutdown(self): - if self._ws_client: - return self._ws_client.shutdown() - - @classmethod - def _auth_header(cls, token): - return ["Authorization: Bearer " + token["access_token"]] - - -class WebsocketNotConnectedException(Exception): - def __init__(self): - super(WebsocketNotConnectedException, self).__init__("Websocket client is not connected!") diff --git a/src/neptune/internal/websockets/websocket_signals_background_job.py b/src/neptune/internal/websockets/websocket_signals_background_job.py deleted file mode 100644 index 998637616..000000000 --- a/src/neptune/internal/websockets/websocket_signals_background_job.py +++ /dev/null @@ -1,138 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["WebsocketSignalsBackgroundJob"] - -import json -import threading -from json.decoder import JSONDecodeError -from typing import ( - TYPE_CHECKING, - Optional, -) - -from websocket import WebSocketConnectionClosedException - -from neptune.attributes.constants import ( - SIGNAL_TYPE_ABORT, - SIGNAL_TYPE_STOP, - SYSTEM_FAILED_ATTRIBUTE_PATH, -) -from neptune.internal.background_job import BackgroundJob -from neptune.internal.threading.daemon import Daemon -from neptune.internal.utils import process_killer -from neptune.internal.utils.logger import get_logger -from neptune.internal.websockets.reconnecting_websocket import ReconnectingWebsocket -from neptune.internal.websockets.websockets_factory import WebsocketsFactory - -if TYPE_CHECKING: - from neptune.objects import NeptuneObject - -logger = get_logger() - - -class WebsocketSignalsBackgroundJob(BackgroundJob): - def __init__(self, ws_factory: WebsocketsFactory): - self._ws_factory = ws_factory - self._thread: "Optional[WebsocketSignalsBackgroundJob._ListenerThread]" = None - self._started = False - - def start(self, container: "NeptuneObject"): - self._thread = self._ListenerThread(container, self._ws_factory.create()) - self._thread.start() - self._started = True - - def stop(self): - if not self._started: - return - self._thread.interrupt() - self._thread.shutdown_ws_client() - - def pause(self): - pass - - def resume(self): - pass - - def join(self, seconds: Optional[float] = None): - if not self._started or threading.get_ident() == self._thread.ident: - return - self._thread.join(seconds) - # Just in case. There is possible race condition when connection can be reestablished after being shutdown. - self._thread.shutdown_ws_client() - - class _ListenerThread(Daemon): - def __init__(self, container: "NeptuneObject", ws_client: ReconnectingWebsocket): - super().__init__(sleep_time=0, name="NeptuneWebhooks") - self._container = container - self._ws_client = ws_client - - def work(self) -> None: - try: - raw_message = self._ws_client.recv() - if raw_message is None or self._is_heartbeat(raw_message): - return - else: - self._handler_message(raw_message) - except WebSocketConnectionClosedException: - pass - - def _handler_message(self, msg: str): - try: - json_msg = json.loads(msg) - msg_type = json_msg.get("type") - msg_body = json_msg.get("body") - if not msg_type: - logger.error("Malformed websocket signal: missing type") - return - if not isinstance(msg_type, str): - logger.error("Malformed websocket signal: type is %s, should be str", type(msg_type)) - return - if msg_type.lower() == SIGNAL_TYPE_STOP: - self._handle_stop(msg_body) - elif msg_type.lower() == SIGNAL_TYPE_ABORT: - self._handle_abort(msg_body) - except JSONDecodeError as ex: - logger.error("Malformed websocket signal: %s, message: %s", ex, msg) - - def _handle_stop(self, msg_body): - msg_body = msg_body or dict() - if not isinstance(msg_body, dict): - logger.error("Malformed websocket signal: body is %s, should be dict", type(msg_body)) - return - run_id = self._container["sys/id"].fetch() - logger.error("Run %s received stop signal. Exiting", run_id) - seconds = msg_body.get("seconds") - self._container.stop(seconds=seconds) - process_killer.kill_me() - - def _handle_abort(self, msg_body): - msg_body = msg_body or dict() - if not isinstance(msg_body, dict): - logger.error("Malformed websocket signal: body is %s, should be dict", type(msg_body)) - return - run_id = self._container["sys/id"].fetch() - logger.error("Run %s received abort signal. 
Exiting", run_id) - seconds = msg_body.get("seconds") - self._container[SYSTEM_FAILED_ATTRIBUTE_PATH] = True - self._container.stop(seconds=seconds) - process_killer.kill_me() - - def shutdown_ws_client(self): - self._ws_client.shutdown() - - @staticmethod - def _is_heartbeat(message: str): - return message.strip() == "" diff --git a/src/neptune/internal/websockets/websockets_factory.py b/src/neptune/internal/websockets/websockets_factory.py deleted file mode 100644 index 384ea86d2..000000000 --- a/src/neptune/internal/websockets/websockets_factory.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["WebsocketsFactory"] - -import threading -from typing import Optional - -from requests_oauthlib import OAuth2Session - -from neptune.internal.websockets.reconnecting_websocket import ReconnectingWebsocket - - -class WebsocketsFactory: - def __init__(self, url: str, session: OAuth2Session, proxies: Optional[dict] = None): - self._url = url - self._session = session - self._proxies = proxies - - def create(self): - return ReconnectingWebsocket( - url=self._url, - oauth2_session=self._session, - shutdown_event=threading.Event(), - proxies=self._proxies, - ) diff --git a/src/neptune/management/__init__.py b/src/neptune/management/__init__.py deleted file mode 100644 index 6a265bbee..000000000 --- a/src/neptune/management/__init__.py +++ /dev/null @@ -1,152 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""The management API lets you perform various neptune.ai administration actions. - -- Create and delete projects -- List projects you can access -- Add and remove project members -- List members of projects and workspaces - -Most actions can also be performed by service accounts. - -Functions: - get_project_list() - create_project() - delete_project() - get_project_member_list() - add_project_member() - remove_project_member() - get_workspace_member_list() - add_project_service_account() - remove_project_service_account() - get_project_service_account_list() - get_workspace_service_account_list() - trash_objects() - -Usage examples --------------- - -Import management API ->>> from neptune import management - -Getting projects in a workspace as a list: ->>> projects = management.get_project_list() - -Creating a new project: ->>> management.create_project( -... name="ml-team/classification", -... key="CLS", -... visibility="workspace", -... 
) - -Deleting a project: ->>> management.delete_project(project="ml-team/classification") - -Get project members list as dictionary, with usernames as keys and roles as values: ->>> members = management.get_project_member_list(project="ml-team/classification") - -Assign a user to a project and specify a role: ->>> management.add_project_member( -... project="ml-team/classification", username="jackie", role="contributor" -... ) - -Remove a user from a project: ->>> management.remove_project_member(project="ml-team/classification", username="janus") - -Get workspace members list as dictionary, with usernames as keys and roles as values: ->>> management.get_workspace_member_list(workspace="ml-team") - -Assign service account to project: ->>> management.add_project_service_account( -... project="ml-team/classification", -... service_account_name="cicd@ml-team", -... role="contributor", -... ) - -Remove service account from project: ->>> management.remove_project_service_account( -... project="ml-team/classification", service_account_name="cicd@ml-team" -... ) - -Get list of project service accounts as dictionary, with usernames as keys and roles as values: ->>> management.get_project_service_account_list(project="ml-team/classification") - -Get list of workspace service accounts as dictionary, with usernames as keys and roles as values: ->>> management.get_workspace_service_account_list(workspace="ml-team") - -Move one or more Neptune objects to the project trash: ->>> project_name = "ml-team/classification" ->>> # Connect to your project: -... project = neptune.init_project(project=project_name) ->>> # Fetch runs tagged as "trash": -... runs_to_trash_df = project.fetch_runs_table(tag="trash").to_pandas() ->>> # Turn run IDs into a list: -... runs_to_trash = runs_to_trash_df["sys/id"].tolist() ->>> # Move the runs to trash: -... management.trash_objects(project=project_name, ids=runs_to_trash) - -Get information about a workspace, including storage usage and limits: ->>> management.get_workspace_status(workspace="ml-team") - ---- - -See also the API reference in the docs: https://docs.neptune.ai/api/management -""" -from .internal.api import ( - WorkspaceMemberRole, - add_project_member, - add_project_service_account, - clear_trash, - create_project, - delete_objects_from_trash, - delete_project, - get_project_list, - get_project_member_list, - get_project_service_account_list, - get_workspace_member_list, - get_workspace_service_account_list, - get_workspace_status, - invite_to_workspace, - remove_project_member, - remove_project_service_account, - trash_objects, -) -from .internal.types import ( - MemberRole, - ProjectVisibility, -) - -__all__ = [ - "clear_trash", - "get_project_list", - "create_project", - "delete_project", - "delete_objects_from_trash", - "get_project_member_list", - "add_project_member", - "remove_project_member", - "get_workspace_member_list", - "invite_to_workspace", - "WorkspaceMemberRole", - "add_project_service_account", - "remove_project_service_account", - "get_project_service_account_list", - "get_workspace_service_account_list", - "get_workspace_status", - "trash_objects", - "MemberRole", - "ProjectVisibility", -] diff --git a/src/neptune/management/exceptions.py b/src/neptune/management/exceptions.py deleted file mode 100644 index 8cd58ae1b..000000000 --- a/src/neptune/management/exceptions.py +++ /dev/null @@ -1,212 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -REGISTERED_CODES = dict() - - -class ManagementOperationFailure(Exception): - code = -1 - description = "Unknown error" - - def __init__(self, **kwargs): - super().__init__() - self._properties: dict = kwargs or {} - - def __str__(self): - return f"{self.description.format(**self._properties)} (code: {self.code})" - - def __init_subclass__(cls): - previous = REGISTERED_CODES.get(cls.code) - assert previous is None, f"{cls} cannot have code {cls.code} already used by {previous}" - REGISTERED_CODES[cls.code] = cls - - @property - def details(self): - return { - "code": self.code, - "description": self.description.format(**self._properties), - } - - -class InvalidProjectName(ManagementOperationFailure): - code = 1 - description = 'Provided project name "{name}" could not be parsed.' - - -class MissingWorkspaceName(ManagementOperationFailure): - code = 2 - description = 'Cannot resolve project "{name}", you have to provide a workspace name.' - - -class ConflictingWorkspaceName(ManagementOperationFailure): - code = 3 - description = 'Project name "{name}" conflicts with provided workspace "{workspace}".' - - -class ProjectNotFound(ManagementOperationFailure): - code = 4 - description = 'Project "{name}" could not be found.' - - -class WorkspaceNotFound(ManagementOperationFailure): - code = 5 - description = 'Workspace "{workspace}" could not be found.' - - -class ProjectAlreadyExists(ManagementOperationFailure): - code = 6 - description = 'Project "{name}" already exists.' - - -class AccessRevokedOnDeletion(ManagementOperationFailure): - code = 7 - description = 'Not enough permissions to delete project "{name}".' - - -class AccessRevokedOnMemberRemoval(ManagementOperationFailure): - code = 8 - description = 'Not enough permissions to remove user "{user}" from project "{project}".' - - -class UserNotExistsOrWithoutAccess(ManagementOperationFailure): - code = 9 - description = ( - 'User "{user}" does not exist or has no access to project "{project}". ' - "If the project visibility is set to 'workspace', a user cannot be added or removed." - ) - - -class UserAlreadyHasAccess(ManagementOperationFailure): - code = 10 - description = 'User "{user}" already has access to the project "{project}". Their role is "{role}".' - - -class ProjectsLimitReached(ManagementOperationFailure): - code = 11 - description = "Project number limit reached." - - -class UnsupportedValue(ManagementOperationFailure): - code = 12 - description = "{enum} cannot have value {value}" - - -class ServiceAccountAlreadyHasAccess(ManagementOperationFailure): - code = 13 - description = ( - 'The service account "{service_account_name}" already has access to the project "{project}", ' - "either because it was already added or because of the project's visibility setting. " - 'The role of the service account is "{role}".' 
- ) - - -class AccessRevokedOnServiceAccountRemoval(ManagementOperationFailure): - code = 14 - description = ( - 'Not enough permissions to remove service account "{service_account_name}" from project "{project}". ' - "The account that performs the removal must be a project owner." - ) - - -class ServiceAccountNotExistsOrWithoutAccess(ManagementOperationFailure): - code = 15 - description = ( - 'Service account "{service_account_name}" does not exist or did not have access to project "{project}" ' - 'in the first place. If the project visibility is set to "workspace", you cannot revoke access for ' - "individual workspace members." - ) - - -class ServiceAccountNotFound(ManagementOperationFailure): - code = 16 - description = 'Service account "{service_account_name}" could not be found in workspace "{workspace}"' - - -class ProjectKeyCollision(ManagementOperationFailure): - code = 17 - description = 'Project with key "{key}" already exists.' - - -class ProjectNameCollision(ManagementOperationFailure): - code = 18 - description = 'Project with name "{name}" already exists.' - - -class ProjectKeyInvalid(ManagementOperationFailure): - code = 19 - description = 'Invalid project key "{key}": {reason}' - - -class ProjectNameInvalid(ManagementOperationFailure): - code = 20 - description = 'Invalid project name "{name}": {reason}' - - -class BadRequestException(ManagementOperationFailure): - code = 400 - description = "Your request has encountered the following validation errors: {validation_errors}" - - -class IncorrectIdentifierException(ManagementOperationFailure): - code = 21 - description = "Cannot parse '{identifier}' as an identifier." - - -class ObjectNotFound(ManagementOperationFailure): - code = 22 - description = "Object not found." - - -class WorkspaceOrUserNotFound(ManagementOperationFailure): - code = 23 - description = "Workspace '{workspace}' or user '{user}' could not be found." - - -class UserAlreadyInvited(ManagementOperationFailure): - code = 24 - description = "User '{user}' has already been invited to the workspace '{workspace}'." - - -class ProjectPrivacyRestrictedException(ManagementOperationFailure): - code = 25 - description = ( - "Cannot set {requested} visibility for project. {followup}This might be caused by workspace " - "settings or limited by your plan." - ) - - def __init__(self, **kwargs): - modified_kwargs = {"followup": ""} - allowed = kwargs.get("allowed") - if allowed and isinstance(allowed, list): - modified_kwargs["followup"] = "Allowed values are: {allowed}. ".format( - allowed=", ".join(['"' + option + '"' for option in allowed]) - ) - modified_kwargs.update(kwargs) - requested = modified_kwargs.get("requested") - if not requested: - modified_kwargs["requested"] = "the selected" - else: - modified_kwargs["requested"] = '"' + requested + '"' - super().__init__(**modified_kwargs) - - -class ActiveProjectsLimitReachedException(ManagementOperationFailure): - code = 26 - description = ( - "Limit of active projects reached. You can have up to {currentQuota} active projects simultaneously. " - "To create a new project, you need to either archive an active project or increase the quota of active " - "projects in the workspace." - ) diff --git a/src/neptune/management/internal/__init__.py b/src/neptune/management/internal/__init__.py deleted file mode 100644 index d71b3273e..000000000 --- a/src/neptune/management/internal/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o.
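Each ManagementOperationFailure subclass above claims a unique error code, and __init_subclass__ asserts on collisions, so a duplicate code is caught at class-definition time rather than when the exception is raised. A hedged sketch with a hypothetical subclass:

    >>> class MyFailure(ManagementOperationFailure):  # hypothetical
    ...     code = 4  # already taken by ProjectNotFound
    ...     description = "Example"
    Traceback (most recent call last):
        ...
    AssertionError: ... cannot have code 4 already used by ...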
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/src/neptune/management/internal/api.py b/src/neptune/management/internal/api.py deleted file mode 100644 index 90bfdb54f..000000000 --- a/src/neptune/management/internal/api.py +++ /dev/null @@ -1,1099 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = [ - "clear_trash", - "get_project_list", - "create_project", - "delete_project", - "delete_objects_from_trash", - "get_project_member_list", - "add_project_member", - "remove_project_member", - "get_workspace_member_list", - "invite_to_workspace", - "WorkspaceMemberRole", - "add_project_service_account", - "remove_project_service_account", - "get_project_service_account_list", - "get_workspace_service_account_list", - "trash_objects", - "get_workspace_status", -] - -import os -from typing import ( - Dict, - Iterable, - List, - Optional, - Union, -) - -from bravado.exception import ( - HTTPBadRequest, - HTTPConflict, - HTTPForbidden, - HTTPNotFound, - HTTPUnprocessableEntity, -) - -from neptune.internal.backends.hosted_client import ( - DEFAULT_REQUEST_KWARGS, - create_backend_client, - create_http_client_with_auth, - create_leaderboard_client, -) -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper -from neptune.internal.backends.utils import ( - parse_validation_errors, - ssl_verify, - with_api_exceptions_handler, -) -from neptune.internal.credentials import Credentials -from neptune.internal.envs import API_TOKEN_ENV_NAME -from neptune.internal.id_formats import QualifiedName -from neptune.internal.utils import ( - verify_collection_type, - verify_type, -) -from neptune.internal.utils.iteration import get_batches -from neptune.internal.utils.logger import get_logger -from neptune.management.exceptions import ( - AccessRevokedOnDeletion, - AccessRevokedOnMemberRemoval, - AccessRevokedOnServiceAccountRemoval, - BadRequestException, - ProjectAlreadyExists, - ProjectNotFound, - ServiceAccountAlreadyHasAccess, - ServiceAccountNotExistsOrWithoutAccess, - ServiceAccountNotFound, - UserAlreadyHasAccess, - UserAlreadyInvited, - UserNotExistsOrWithoutAccess, - WorkspaceNotFound, - WorkspaceOrUserNotFound, -) -from neptune.management.internal.dto import ( - ProjectMemberRoleDTO, - ProjectVisibilityDTO, - ServiceAccountDTO, - WorkspaceMemberRoleDTO, -) -from neptune.management.internal.types import ProjectVisibility -from neptune.management.internal.utils import ( - WorkspaceMemberRole, - 
extract_project_and_workspace, - normalize_project_name, -) - -logger = get_logger() -TRASH_BATCH_SIZE = 100 - - -def _get_token(api_token: Optional[str] = None) -> str: - return api_token or os.getenv(API_TOKEN_ENV_NAME) - - -def _get_http_client_and_config(api_token: Optional[str] = None): - credentials = Credentials.from_token(api_token=_get_token(api_token=api_token)) - http_client, client_config = create_http_client_with_auth( - credentials=credentials, ssl_verify=ssl_verify(), proxies={} - ) - return http_client, client_config - - -def _get_backend_client(api_token: Optional[str] = None) -> SwaggerClientWrapper: - http_client, client_config = _get_http_client_and_config(api_token) - return create_backend_client(client_config=client_config, http_client=http_client) - - -def _get_leaderboard_client(api_token: Optional[str] = None) -> SwaggerClientWrapper: - http_client, client_config = _get_http_client_and_config(api_token) - return create_leaderboard_client(client_config=client_config, http_client=http_client) - - -def get_project_list(*, api_token: Optional[str] = None) -> List[str]: - """Lists projects that the account has access to. - - Args: - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Returns: - List of project names in the form "workspace-name/project-name". - - Example: - >>> from neptune import management - >>> management.get_project_list() - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("api_token", api_token, (str, type(None))) - backend_client = _get_backend_client(api_token=api_token) - params = { - "userRelation": "viewerOrHigher", - "sortBy": ["lastViewed"], - **DEFAULT_REQUEST_KWARGS, - } - projects = _get_projects(backend_client, params) - - return [normalize_project_name(name=project.name, workspace=project.organizationName) for project in projects] - - -@with_api_exceptions_handler -def _get_projects(backend_client, params) -> List: - return backend_client.api.listProjects(**params).response().result.entries - - -def create_project( - name: str, - *, - key: Optional[str] = None, - workspace: Optional[str] = None, - visibility: str = ProjectVisibility.PRIVATE, - description: Optional[str] = None, - api_token: Optional[str] = None, -) -> str: - """Creates a new project in a Neptune workspace. - - Args: - name: The name for the project in Neptune. Can contain letters and hyphens. For example, "classification". - If you leave out the workspace argument, include the workspace name here, - in the form "workspace-name/project-name". For example, "ml-team/classification". - key: Project identifier. Must contain 1-10 upper case letters or numbers (at least one letter). - For example, "CLS2". If you leave it out, Neptune generates a project key for you. - workspace: Name of your Neptune workspace. - If None, it will be parsed from the name argument. - visibility: Level of privacy for the project. Options: - - "pub": Public. Anyone on the internet can see it. - - "priv": Private. Only users specifically assigned to the project can access it. Requires a plan with - project-level access control. - - "workspace" (team workspaces only): Accessible to all workspace members. - If None, it will be set to "priv". - description: Project description. - If None, it will be left empty. 
- api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Returns: - The name of the new project you created. - - Example: - >>> from neptune import management - >>> management.create_project( - ... workspace="ml-team", - ... name="classification", - ... key="CLS", - ... visibility="pub", - ... ) - 'ml-team/classification' - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("name", name, str) - verify_type("key", key, (str, type(None))) - verify_type("workspace", workspace, (str, type(None))) - verify_type("visibility", visibility, str) - verify_type("description", description, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - workspace, name = extract_project_and_workspace(name=name, workspace=workspace) - project_qualified_name = f"{workspace}/{name}" - workspace_id = _get_workspace_id(backend_client, workspace) - - params = { - "projectToCreate": { - "name": name, - "description": description, - "projectKey": key, - "organizationId": workspace_id, - "visibility": ProjectVisibilityDTO.from_str(visibility).value, - }, - **DEFAULT_REQUEST_KWARGS, - } - - project_response = _create_project(backend_client, project_qualified_name, params) - - return normalize_project_name( - name=project_response.result.name, - workspace=project_response.result.organizationName, - ) - - -def _get_workspace_id(backend_client, workspace) -> str: - workspaces = _get_workspaces(backend_client) - workspace_name_to_id = {f"{f.name}": f.id for f in workspaces} - if workspace not in workspace_name_to_id: - raise WorkspaceNotFound(workspace=workspace) - return workspace_name_to_id[workspace] - - -@with_api_exceptions_handler -def _get_workspaces(backend_client): - return backend_client.api.listOrganizations(**DEFAULT_REQUEST_KWARGS).response().result - - -@with_api_exceptions_handler -def _create_project(backend_client, project_qualified_name: str, params: dict): - try: - return backend_client.api.createProject(**params).response() - except HTTPBadRequest as e: - validation_errors = parse_validation_errors(error=e) - if "ERR_NOT_UNIQUE" in validation_errors: - raise ProjectAlreadyExists(name=project_qualified_name) from e - raise BadRequestException(validation_errors=validation_errors) - - -@with_api_exceptions_handler -def delete_project(project: str, *, workspace: Optional[str] = None, api_token: Optional[str] = None): - """Deletes a project from a Neptune workspace. - - To delete projects, the user must be a workspace admin. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the "project" argument should only contain "project-name" - instead of "workspace-name/project-name". - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: User's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. 
- - Example: - >>> from neptune import management - >>> management.delete_project(project="ml-team/classification") - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - project_identifier = normalize_project_name(name=project, workspace=workspace) - - params = {"projectIdentifier": project_identifier, **DEFAULT_REQUEST_KWARGS} - - try: - backend_client.api.deleteProject(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_identifier) from e - except HTTPForbidden as e: - raise AccessRevokedOnDeletion(name=project_identifier) from e - - -@with_api_exceptions_handler -def add_project_member( - project: str, - username: str, - role: str, - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -): - """Adds a member to a Neptune project. - - Only project owners can add members. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - username: Name of the user to add to the project. - role: level of permissions the user should have in a project. Options: - - "viewer": Can only view project content and members. - - "contributor": Can view and edit project content. Can view members. - - "owner": Can view and edit project content and members. - For more information, see https://docs.neptune.ai/management/roles/ - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Examples: - >>> from neptune import management - >>> management.add_project_member( - ... workspace="ml-team", - ... project="classification", - ... username="johnny", - ... role="contributor", - ... 
) - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("username", username, str) - verify_type("role", role, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - project_identifier = normalize_project_name(name=project, workspace=workspace) - - params = { - "projectIdentifier": project_identifier, - "member": { - "userId": username, - "role": ProjectMemberRoleDTO.from_str(role).value, - }, - **DEFAULT_REQUEST_KWARGS, - } - - try: - backend_client.api.addProjectMember(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_identifier) from e - except HTTPConflict as e: - members = get_project_member_list(project=project, workspace=workspace, api_token=api_token) - user_role = members.get(username) - raise UserAlreadyHasAccess(user=username, project=project_identifier, role=user_role) from e - - -@with_api_exceptions_handler -def get_project_member_list( - project: str, *, workspace: Optional[str] = None, api_token: Optional[str] = None -) -> Dict[str, str]: - """Lists members of a Neptune project. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Returns: - Dictionary with usernames as keys and project member roles - ("owner", "contributor", "viewer") as values. - - Example: - >>> from neptune import management - >>> management.get_project_member_list(project="ml-team/classification") - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - project_identifier = normalize_project_name(name=project, workspace=workspace) - - params = {"projectIdentifier": project_identifier, **DEFAULT_REQUEST_KWARGS} - - try: - result = backend_client.api.listProjectMembers(**params).response().result - return {f"{m.registeredMemberInfo.username}": ProjectMemberRoleDTO.to_domain(m.role) for m in result} - except HTTPNotFound as e: - raise ProjectNotFound(name=project_identifier) from e - - -@with_api_exceptions_handler -def remove_project_member( - project: str, - username: str, - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -): - """Removes a member from a Neptune project. - - Only project owners can remove members. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name".
- username: Name of the user to remove from the project. - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Example: - >>> from neptune import management - >>> management.remove_project_member( - ... project="ml-team/classification", - ... username="johnny", - ... ) - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("username", username, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - project_identifier = normalize_project_name(name=project, workspace=workspace) - - params = { - "projectIdentifier": project_identifier, - "userId": username, - **DEFAULT_REQUEST_KWARGS, - } - - try: - backend_client.api.deleteProjectMember(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_identifier) from e - except HTTPUnprocessableEntity as e: - raise UserNotExistsOrWithoutAccess(user=username, project=project_identifier) from e - except HTTPForbidden as e: - raise AccessRevokedOnMemberRemoval(user=username, project=project_identifier) from e - - -@with_api_exceptions_handler -def get_workspace_member_list(workspace: str, *, api_token: Optional[str] = None) -> Dict[str, str]: - """Lists members of a Neptune workspace. - - Args: - workspace: Name of the Neptune workspace. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Returns: - Dictionary with usernames as keys and workspace member roles ("admin", "member") as values. 
- - Example: - >>> from neptune import management - >>> management.get_workspace_member_list(workspace="ml-team") - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("workspace", workspace, str) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - - params = {"organizationIdentifier": workspace, **DEFAULT_REQUEST_KWARGS} - - try: - result = backend_client.api.listOrganizationMembers(**params).response().result - return {f"{m.registeredMemberInfo.username}": WorkspaceMemberRoleDTO.to_domain(m.role) for m in result} - except HTTPNotFound as e: - raise WorkspaceNotFound(workspace=workspace) from e - - -@with_api_exceptions_handler -def _get_raw_workspace_service_account_list( - workspace_name: str, api_token: Optional[str] = None -) -> Dict[str, ServiceAccountDTO]: - verify_type("workspace_name", workspace_name, str) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - - params = { - "organizationIdentifier": workspace_name, - "deactivated": False, - **DEFAULT_REQUEST_KWARGS, - } - - try: - result = backend_client.api.listServiceAccounts(**params).response().result - return {f"{sa.displayName}": ServiceAccountDTO(name=sa.displayName, id=sa.id) for sa in result} - except HTTPNotFound as e: - raise WorkspaceNotFound(workspace=workspace_name) from e - - -@with_api_exceptions_handler -def get_workspace_service_account_list(workspace: str, *, api_token: Optional[str] = None) -> Dict[str, str]: - """Lists service accounts of a Neptune workspace. - - Args: - workspace: Name of the Neptune workspace. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - Returns: - Dictionary with account names as keys and workspace member roles as values. - Service accounts can only have the "member" role in workspaces. - - Example: - >>> from neptune import management - >>> management.get_workspace_service_account_list(workspace="ml-team") - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - service_accounts = _get_raw_workspace_service_account_list(workspace_name=workspace, api_token=api_token) - - return { - service_account_name: WorkspaceMemberRoleDTO.to_domain("member") - for service_account_name, _ in service_accounts.items() - } - - -@with_api_exceptions_handler -def invite_to_workspace( - *, - username: Optional[str] = None, - email: Optional[str] = None, - workspace: str, - api_token: Optional[str] = None, - role: Union[WorkspaceMemberRole, str] = WorkspaceMemberRole.MEMBER, - add_to_all_projects: bool = False, -) -> None: - """Creates an invitation to a Neptune workspace. - - Provide either the username or email of the person to invite. - - Args: - username: username of the user to invite. Note: If you provide this, leave out the email argument. - email: email of the user to invite. Note: If you provide this, leave out the username argument. - workspace: Name of your Neptune workspace. - api_token: User's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. 
- role: The workspace role that is to be granted to the invited user. - You can choose between the following values: "admin" and "member". - add_to_all_projects: Whether to add the invited user to all projects in the workspace. - - Example: - >>> from neptune import management - >>> management.invite_to_workspace( - ... username="user", - ... workspace="ml-team", - ... role="admin", - ... ) - - Learn more in the docs: - - https://docs.neptune.ai/api/management/#invite_to_workspace - - https://docs.neptune.ai/management/inviting_people/ - """ - verify_type("workspace", workspace, str) - verify_type("role", role, (WorkspaceMemberRole, str)) - verify_type("add_to_all_projects", add_to_all_projects, bool) - verify_type("username", username, (str, type(None))) - verify_type("email", email, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - if username and email: - raise ValueError("Cannot specify both `username` and `email`.") - - if username: - invitee = username - invitation_type = "user" - elif email: - invitee = email - invitation_type = "emailRecipient" - else: - raise ValueError("Neither `username` nor `email` was provided. Pass exactly one of them.") - - if isinstance(role, str): - role = WorkspaceMemberRole(role) - - params = { - "newOrganizationInvitations": { - "invitationsEntries": [ - { - "invitee": invitee, - "invitationType": invitation_type, - "roleGrant": role.to_api(), - "addToAllProjects": add_to_all_projects, - } - ], - "organizationIdentifier": workspace, - }, - **DEFAULT_REQUEST_KWARGS, - } - - backend_client = _get_backend_client(api_token=api_token) - try: - backend_client.api.createOrganizationInvitations(**params) - except HTTPNotFound: - raise WorkspaceOrUserNotFound(workspace=workspace, user=invitee) - except HTTPConflict: - raise UserAlreadyInvited(user=invitee, workspace=workspace) - - -@with_api_exceptions_handler -def get_project_service_account_list( - project: str, *, workspace: Optional[str] = None, api_token: Optional[str] = None -) -> Dict[str, str]: - """Lists service accounts assigned to a Neptune project. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - Returns: - Dictionary with account names as keys and project member roles - ("owner", "contributor", "viewer") as values. - - Example: - >>> from neptune import management - >>> management.get_project_service_account_list( - ... workspace="ml-team", - ... project="classification", - ...
) - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - project_identifier = normalize_project_name(name=project, workspace=workspace) - - params = {"projectIdentifier": project_identifier, **DEFAULT_REQUEST_KWARGS} - - try: - result = backend_client.api.listProjectServiceAccounts(**params).response().result - return {f"{sa.serviceAccountInfo.displayName}": ProjectMemberRoleDTO.to_domain(sa.role) for sa in result} - except HTTPNotFound as e: - raise ProjectNotFound(name=project_identifier) from e - - -@with_api_exceptions_handler -def add_project_service_account( - project: str, - service_account_name: str, - role: str, - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -): - """Adds a service account to a Neptune project. - - Only project owners can add accounts as members. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - service_account_name: Name of the service account to add to the project. - role: level of permissions the user or service account should have in a project. - Can be set to: - - "viewer": can only view project content and members - - "contributor": can view and edit project content and only view members - - "owner": can view and edit project content and members - For more information, see https://docs.neptune.ai/management/roles/ - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Examples: - >>> from neptune import management - >>> management.add_project_service_account( - ... workspace="ml-team", - ... project="classification", - ... service_account_name="cicd@ml-team", - ... role="contributor", - ... 
) - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("service_account_name", service_account_name, str) - verify_type("role", role, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - workspace, project_name = extract_project_and_workspace(name=project, workspace=workspace) - project_qualified_name = f"{workspace}/{project_name}" - - try: - service_account = _get_raw_workspace_service_account_list(workspace_name=workspace, api_token=api_token)[ - service_account_name - ] - except KeyError as e: - raise ServiceAccountNotFound(service_account_name=service_account_name, workspace=workspace) from e - - params = { - "projectIdentifier": project_qualified_name, - "account": { - "serviceAccountId": service_account.id, - "role": ProjectMemberRoleDTO.from_str(role).value, - }, - **DEFAULT_REQUEST_KWARGS, - } - - try: - backend_client.api.addProjectServiceAccount(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_qualified_name) from e - except HTTPConflict as e: - service_accounts = get_project_service_account_list(project=project, workspace=workspace, api_token=api_token) - service_account_role = service_accounts.get(service_account_name) - - raise ServiceAccountAlreadyHasAccess( - service_account_name=service_account_name, - project=project_qualified_name, - role=service_account_role, - ) from e - - -@with_api_exceptions_handler -def remove_project_service_account( - project: str, - service_account_name: str, - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -): - """Removes a service account from a Neptune project. - - Only project owners can remove accounts. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - service_account_name: Name of the service account to remove from the project. - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Examples: - >>> from neptune import management - >>> management.remove_project_service_account( - ... workspace="ml-team", - ... project="classification", - ... service_account_name="cicd@ml-team", - ... 
) - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management - """ - verify_type("project", project, str) - verify_type("service_account_name", service_account_name, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - workspace, project_name = extract_project_and_workspace(name=project, workspace=workspace) - project_qualified_name = f"{workspace}/{project_name}" - - try: - service_account = _get_raw_workspace_service_account_list(workspace_name=workspace, api_token=api_token)[ - service_account_name - ] - except KeyError as e: - raise ServiceAccountNotFound(service_account_name=service_account_name, workspace=workspace) from e - - params = { - "projectIdentifier": project_qualified_name, - "serviceAccountId": service_account.id, - **DEFAULT_REQUEST_KWARGS, - } - - try: - backend_client.api.deleteProjectServiceAccount(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_qualified_name) from e - except HTTPUnprocessableEntity as e: - raise ServiceAccountNotExistsOrWithoutAccess( - service_account_name=service_account_name, project=project_qualified_name - ) from e - except HTTPForbidden as e: - raise AccessRevokedOnServiceAccountRemoval( - service_account_name=service_account_name, project=project_qualified_name - ) from e - - -def trash_objects( - project: str, - ids: Union[str, Iterable[str]], - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -) -> None: - """Moves the specified Neptune objects to the project trash. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - ids: Neptune ID of the object to trash (or list of multiple IDs). - You can find the ID in the leftmost column of the table view, and in the "sys/id" field of each object. - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Examples: - - Trashing a run with the ID "CLS-1": - >>> from neptune import management - >>> management.trash_objects(project="ml-team/classification", ids="CLS-1") - - Trashing two runs and a model with the key "PRETRAINED": - >>> management.trash_objects( - ... project="ml-team/classification", - ... ids=["CLS-2", "CLS-3", "CLS-PRETRAINED"] - ... ) - Note: Trashing a model object also trashes all of its versions.
- - For more, see the docs: https://docs.neptune.ai/api/management/#trash_objects - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - if ids is not None: - if isinstance(ids, str): - ids = [ids] - else: - verify_collection_type("ids", ids, str) - - leaderboard_client = _get_leaderboard_client(api_token=api_token) - workspace, project_name = extract_project_and_workspace(name=project, workspace=workspace) - project_qualified_name = f"{workspace}/{project_name}" - - qualified_name_ids = [QualifiedName(f"{workspace}/{project_name}/{container_id}") for container_id in ids] - errors = list() - succeeded = 0 - for batch_ids in get_batches(qualified_name_ids, batch_size=TRASH_BATCH_SIZE): - params = { - "projectIdentifier": project_qualified_name, - "experimentIdentifiers": batch_ids, - **DEFAULT_REQUEST_KWARGS, - } - try: - response = leaderboard_client.api.trashExperiments(**params).response() - except HTTPNotFound as e: - raise ProjectNotFound(name=project_qualified_name) from e - errors += response.result.errors - succeeded += len(response.result.updatedExperimentIdentifiers) - - for error in errors: - logger.warning(error) - - logger.info("Successfully trashed objects: %d. Number of failures: %d.", succeeded, len(ids) - succeeded) - - -def delete_objects_from_trash( - project: str, - ids: Union[str, Iterable[str]], - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -) -> None: - """Permanently deletes the specified objects from the project trash. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - ids: Neptune ID of object to delete from trash (or list of multiple IDs). - You can find the ID in the leftmost column of the table view, and in the "sys/id" field of each object. - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Examples: - - Deleting a run with the ID "CLS-1" from trash: - >>> from neptune import management - >>> management.delete_objects_from_trash(project="ml-team/classification", ids="CLS-1") - - Deleting two runs and a model with the key "PRETRAINED" from trash: - >>> management.delete_objects_from_trash( - ... project="ml-team/classification", - ... ids=["CLS-2", "CLS-3", "CLS-PRETRAINED"], - ... 
) - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - workspace, project_name = extract_project_and_workspace(name=project, workspace=workspace) - project_qualified_name = f"{workspace}/{project_name}" - - if isinstance(ids, str): - ids = [ids] - - verify_collection_type("ids", ids, str) - - leaderboard_client = _get_leaderboard_client(api_token=api_token) - - qualified_name_ids = [QualifiedName(f"{workspace}/{project_name}/{container_id}") for container_id in ids] - for batch_ids in get_batches(qualified_name_ids, batch_size=TRASH_BATCH_SIZE): - params = { - "projectIdentifier": project_qualified_name, - "experimentIdentifiers": batch_ids, - **DEFAULT_REQUEST_KWARGS, - } - response = leaderboard_client.api.deleteExperiments(**params).response() - - for error in response.result.errors: - logger.warning(error) - - -def clear_trash( - project: str, - *, - workspace: Optional[str] = None, - api_token: Optional[str] = None, -) -> None: - """Permanently deletes all Neptune objects from the project trash. - - Args: - project: The name of the project in Neptune in the form "workspace-name/project-name". - If you pass the workspace argument, the project argument should only contain "project-name" - instead of "workspace-name/project-name". - workspace: Name of your Neptune workspace. If you specify it, - change the format of the project argument to "project-name" instead of "workspace-name/project-name". - If None, it will be parsed from the project argument. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Example: - >>> from neptune import management - >>> management.clear_trash(project="ml-team/classification") - """ - verify_type("project", project, str) - verify_type("workspace", workspace, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - - leaderboard_client = _get_leaderboard_client(api_token=api_token) - - workspace, project_name = extract_project_and_workspace(name=project, workspace=workspace) - project_qualified_name = f"{workspace}/{project_name}" - - params = { - "projectIdentifier": project_qualified_name, - **DEFAULT_REQUEST_KWARGS, - } - - response = leaderboard_client.api.deleteAllExperiments(**params).response() - - for error in response.result.errors: - logger.warning(error) - - -def get_workspace_status(workspace: str, *, api_token: Optional[str] = None) -> Dict[str, int]: - """Retrieves status information about a Neptune workspace. - - Includes the following: - - - Storage usage and limit - - Active project count and limit - - Member count - - Args: - workspace: Name of the Neptune workspace. - api_token: Account's API token. - If None, the value of the NEPTUNE_API_TOKEN environment variable is used. - Note: To keep your token secure, use the NEPTUNE_API_TOKEN environment variable rather than placing your - API token in plain text in your source code. - - Returns: - Dictionary with metric name as keys and float values - - Example: - >>> from neptune import management - >>> management.get_workspace_status(workspace="ml-team") - ... {'storageBytesAvailable': 214747451765, - ... 'storageBytesLimit': 214748364800, - ... 'storageBytesUsed': 913035, - ... 'activeProjectsUsage': 1, - ... 'activeProjectsLimit': 1, - ... 
'membersCount': 1} - - You may also want to check the management API reference: - https://docs.neptune.ai/api/management/#get_workspace_status - """ - verify_type("workspace", workspace, str) - verify_type("api_token", api_token, (str, type(None))) - - backend_client = _get_backend_client(api_token=api_token) - - params = { - "organizationIdentifier": workspace, - **DEFAULT_REQUEST_KWARGS, - } - - try: - response = backend_client.api.workspaceStatus(**params).response() - - result = dict() - if hasattr(response.result, "storageBytesAvailable"): - result["storageBytesAvailable"] = response.result.storageBytesAvailable - if hasattr(response.result, "storageBytesLimit"): - result["storageBytesLimit"] = response.result.storageBytesLimit - if hasattr(response.result, "storageBytesAvailable") and hasattr(response.result, "storageBytesLimit"): - result["storageBytesUsed"] = response.result.storageBytesLimit - response.result.storageBytesAvailable - if hasattr(response.result, "activeProjectsUsage"): - result["activeProjectsUsage"] = response.result.activeProjectsUsage - if hasattr(response.result, "activeProjectsLimit"): - result["activeProjectsLimit"] = response.result.activeProjectsLimit - if hasattr(response.result, "membersCount"): - result["membersCount"] = response.result.membersCount - return result - except HTTPNotFound as e: - raise WorkspaceNotFound(workspace=workspace) from e diff --git a/src/neptune/management/internal/dto.py b/src/neptune/management/internal/dto.py deleted file mode 100644 index f01bed086..000000000 --- a/src/neptune/management/internal/dto.py +++ /dev/null @@ -1,91 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
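Worth noting in get_workspace_status above: the backend reports the available and limit figures, and storageBytesUsed is derived client-side as their difference. A quick check against the docstring example, for illustration:

    # storageBytesUsed = storageBytesLimit - storageBytesAvailable
    assert 214748364800 - 214747451765 == 913035  # matches the example output above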
-# -from dataclasses import dataclass -from enum import Enum - -from neptune.internal.utils import verify_type -from neptune.management.exceptions import UnsupportedValue -from neptune.management.internal.types import ( - ProjectMemberRole, - ProjectVisibility, - WorkspaceMemberRole, -) - - -class ProjectVisibilityDTO(Enum): - PRIVATE = "priv" - PUBLIC = "pub" - WORKSPACE = "workspace" - - @classmethod - def from_str(cls, visibility: str) -> "ProjectVisibilityDTO": - verify_type("visibility", visibility, str) - - try: - return { - ProjectVisibility.PRIVATE: ProjectVisibilityDTO.PRIVATE, - ProjectVisibility.PUBLIC: ProjectVisibilityDTO.PUBLIC, - ProjectVisibility.WORKSPACE: ProjectVisibilityDTO.WORKSPACE, - }[visibility] - except KeyError as e: - raise UnsupportedValue(enum=cls.__name__, value=visibility) from e - - -class ProjectMemberRoleDTO(Enum): - VIEWER = "viewer" - MEMBER = "member" - MANAGER = "manager" - - @classmethod - def from_str(cls, role: str) -> "ProjectMemberRoleDTO": - verify_type("role", role, str) - - try: - return { - ProjectMemberRole.VIEWER: ProjectMemberRoleDTO.VIEWER, - ProjectMemberRole.CONTRIBUTOR: ProjectMemberRoleDTO.MEMBER, - ProjectMemberRole.OWNER: ProjectMemberRoleDTO.MANAGER, - }[role] - except KeyError as e: - raise UnsupportedValue(enum=cls.__name__, value=role) from e - - @staticmethod - def to_domain(role: str) -> str: - verify_type("role", role, str) - - return { - ProjectMemberRoleDTO.VIEWER.value: ProjectMemberRole.VIEWER, - ProjectMemberRoleDTO.MANAGER.value: ProjectMemberRole.OWNER, - ProjectMemberRoleDTO.MEMBER.value: ProjectMemberRole.CONTRIBUTOR, - }.get(role) - - -class WorkspaceMemberRoleDTO(Enum): - OWNER = "owner" - MEMBER = "member" - - @staticmethod - def to_domain(role: str) -> str: - return { - WorkspaceMemberRoleDTO.OWNER.value: WorkspaceMemberRole.ADMIN, - WorkspaceMemberRoleDTO.MEMBER.value: WorkspaceMemberRole.MEMBER, - }.get(role) - - -@dataclass -class ServiceAccountDTO: - name: str - id: str diff --git a/src/neptune/management/internal/types.py b/src/neptune/management/internal/types.py deleted file mode 100644 index 718c43780..000000000 --- a/src/neptune/management/internal/types.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = ( - "ProjectVisibility", - "ProjectMemberRole", - "MemberRole", - "WorkspaceMemberRole", -) - - -class ProjectVisibility: - PRIVATE = "priv" - PUBLIC = "pub" - WORKSPACE = "workspace" - - -class ProjectMemberRole: - VIEWER = "viewer" - OWNER = "owner" - CONTRIBUTOR = "contributor" - - # Deprecated - MEMBER = CONTRIBUTOR - MANAGER = OWNER - - -MemberRole = ProjectMemberRole - - -class WorkspaceMemberRole: - ADMIN = "admin" - MEMBER = "member" diff --git a/src/neptune/management/internal/utils.py b/src/neptune/management/internal/utils.py deleted file mode 100644 index 2dc60a5e7..000000000 --- a/src/neptune/management/internal/utils.py +++ /dev/null @@ -1,63 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. 
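The DTO enums above encode a rename between the public management API and the backend wire format. A short sketch of the round trip, assuming the classes as defined in dto.py:

    # Public role names differ from the backend values:
    assert ProjectMemberRoleDTO.from_str("contributor").value == "member"
    assert ProjectMemberRoleDTO.from_str("owner").value == "manager"
    # to_domain() reverses the mapping when reading responses:
    assert ProjectMemberRoleDTO.to_domain("manager") == "owner"
    assert WorkspaceMemberRoleDTO.to_domain("owner") == "admin"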
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import re -from enum import Enum -from typing import Optional - -from neptune.internal.utils.patterns import PROJECT_QUALIFIED_NAME_PATTERN -from neptune.management.exceptions import ( - ConflictingWorkspaceName, - InvalidProjectName, - MissingWorkspaceName, -) - - -def extract_project_and_workspace(name: str, workspace: Optional[str] = None): - project_spec = re.search(PROJECT_QUALIFIED_NAME_PATTERN, name) - - if not project_spec: - raise InvalidProjectName(name=name) - - extracted_workspace, extracted_project_name = ( - project_spec["workspace"], - project_spec["project"], - ) - - if not workspace and not extracted_workspace: - raise MissingWorkspaceName(name=name) - - if workspace and extracted_workspace and workspace != extracted_workspace: - raise ConflictingWorkspaceName(name=name, workspace=workspace) - - final_workspace_name = extracted_workspace or workspace - - return final_workspace_name, extracted_project_name - - -def normalize_project_name(name: str, workspace: Optional[str] = None): - extracted_workspace_name, extracted_project_name = extract_project_and_workspace(name=name, workspace=workspace) - - return f"{extracted_workspace_name}/{extracted_project_name}" - - -class WorkspaceMemberRole(Enum): - MEMBER = "member" - ADMIN = "admin" - - def to_api(self) -> str: - if self.value == "admin": - return "owner" - return self.value diff --git a/src/neptune/objects/__init__.py b/src/neptune/objects/__init__.py deleted file mode 100644 index 4e5341ea0..000000000 --- a/src/neptune/objects/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "NeptuneObject", - "Model", - "ModelVersion", - "Project", - "Run", -] - -from neptune.objects.model import Model -from neptune.objects.model_version import ModelVersion -from neptune.objects.neptune_object import NeptuneObject -from neptune.objects.project import Project -from neptune.objects.run import Run diff --git a/src/neptune/objects/abstract.py b/src/neptune/objects/abstract.py deleted file mode 100644 index 530fa9f9a..000000000 --- a/src/neptune/objects/abstract.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
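A brief sketch of how the name helpers in utils.py above resolve the workspace/project pair (based on the logic shown; a mismatched or missing workspace raises ConflictingWorkspaceName or MissingWorkspaceName respectively):

    assert normalize_project_name(name="ml-team/classification") == "ml-team/classification"
    assert normalize_project_name(name="classification", workspace="ml-team") == "ml-team/classification"
    # The API spells the workspace admin role "owner":
    assert WorkspaceMemberRole.ADMIN.to_api() == "owner"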
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["SupportsNamespaces"] - -from abc import ( - ABC, - abstractmethod, -) -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from neptune.handler import Handler - - -class SupportsNamespaces(ABC): - """ - Interface for Neptune objects that support subscripting (selecting namespaces). - It can be a Run, Model, ModelVersion, Project, or an already selected namespace (Handler). - - Example: - >>> from neptune import init_run - >>> from neptune.typing import SupportsNamespaces - >>> class NeptuneCallback: - ... # Proper type hinting of `start_from` parameter. - ... def __init__(self, start_from: SupportsNamespaces): - ... self._start_from = start_from - ... - ... def log_accuracy(self, accuracy: float) -> None: - ... self._start_from["train/acc"] = accuracy - ... - >>> run = init_run() - >>> callback = NeptuneCallback(start_from=run) - >>> callback.log_accuracy(0.8) - >>> # or - ... callback = NeptuneCallback(start_from=run["some/random/path"]) - >>> callback.log_accuracy(0.8) - """ - - @abstractmethod - def __getitem__(self, path: str) -> "Handler": ... - - @abstractmethod - def __setitem__(self, key: str, value) -> None: ... - - @abstractmethod - def __delitem__(self, path) -> None: ... - - @abstractmethod - def get_root_object(self) -> "SupportsNamespaces": ... diff --git a/src/neptune/objects/model.py b/src/neptune/objects/model.py deleted file mode 100644 index bcd7fd4b2..000000000 --- a/src/neptune/objects/model.py +++ /dev/null @@ -1,357 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -__all__ = ["Model"] - -import os -from typing import ( - TYPE_CHECKING, - Iterable, - List, - Optional, -) - -from typing_extensions import Literal - -from neptune.attributes.constants import SYSTEM_NAME_ATTRIBUTE_PATH -from neptune.envs import CONNECTION_MODE -from neptune.exceptions import ( - InactiveModelException, - NeedExistingModelForReadOnlyMode, - NeptuneMissingRequiredInitParameter, - NeptuneModelKeyAlreadyExistsError, - NeptuneObjectCreationConflict, - NeptuneUnsupportedFunctionalityException, -) -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.backends.nql import ( - NQLAggregator, - NQLAttributeOperator, - NQLAttributeType, - NQLQueryAggregate, - NQLQueryAttribute, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneException -from neptune.internal.id_formats import QualifiedName -from neptune.internal.init.parameters import ( - ASYNC_LAG_THRESHOLD, - ASYNC_NO_PROGRESS_THRESHOLD, - DEFAULT_FLUSH_PERIOD, - DEFAULT_NAME, - OFFLINE_PROJECT_QUALIFIED_NAME, -) -from neptune.internal.state import ContainerState -from neptune.internal.utils import verify_type -from neptune.internal.utils.ping_background_job import PingBackgroundJob -from neptune.objects.neptune_object import ( - NeptuneObject, - NeptuneObjectCallback, -) -from neptune.objects.utils import build_raw_query -from neptune.table import Table -from neptune.types.mode import Mode -from neptune.typing import ( - ProgressBarCallback, - ProgressBarType, -) - -if TYPE_CHECKING: - from neptune.internal.background_job import BackgroundJob - - -class Model(NeptuneObject): - """Initializes a Model object from an existing or new model. - - You can use this to create a new model from code or to perform actions on existing models. - - A Model object is suitable for storing model metadata that is common to all versions (you can use ModelVersion - objects to track version-specific metadata). It does not track background metrics or logs automatically, - but you can assign metadata to the Model object just like you can for runs. - To learn more about model registry, see the docs: https://docs.neptune.ai/model_registry/overview/ - - You can also use the Model object as a context manager (see examples). - - Args: - with_id: The Neptune identifier of an existing model to resume, such as "CLS-PRE". - The identifier is stored in the model's "sys/id" field. - If left empty, a new model is created. - name: Custom name for the model. You can add it as a column in the models table ("sys/name"). - You can also edit the name in the app, in the information view. - key: Key for the model. Required when creating a new model. - Used together with the project key to form the model identifier. - Must be uppercase and unique within the project. - project: Name of a project in the form `workspace-name/project-name`. - If None, the value of the NEPTUNE_PROJECT environment variable is used. - api_token: User's API token. - If left empty, the value of the NEPTUNE_API_TOKEN environment variable is used (recommended). - mode: Connection mode in which the tracking will work. - If `None` (default), the value of the NEPTUNE_MODE environment variable is used. - If no value was set for the environment variable, "async" is used by default. - Possible values are `async`, `sync`, `read-only`, and `debug`. - flush_period: In the asynchronous (default) connection mode, how often disk flushing is triggered - (in seconds). 
- proxies: Argument passed to HTTP calls made via the Requests library, as dictionary of strings. - For more information about proxies, see the Requests documentation. - async_lag_callback: Custom callback which is called if the lag between a queued operation and its - synchronization with the server exceeds the duration defined by `async_lag_threshold`. The callback - should take a Model object as the argument and can contain any custom code, such as calling `stop()` on - the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK` environment variable to `TRUE`. - async_lag_threshold: In seconds, duration between the queueing and synchronization of an operation. - If a lag callback (default callback enabled via environment variable or custom callback passed to the - `async_lag_callback` argument) is enabled, the callback is called when this duration is exceeded. - async_no_progress_callback: Custom callback which is called if there has been no synchronization progress - whatsoever for the duration defined by `async_no_progress_threshold`. The callback should take a Model - object as the argument and can contain any custom code, such as calling `stop()` on the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK` environment variable to `TRUE`. - async_no_progress_threshold: In seconds, for how long there has been no synchronization progress since the - object was initialized. If a no-progress callback (default callback enabled via environment variable or - custom callback passed to the `async_no_progress_callback` argument) is enabled, the callback is called - when this duration is exceeded. - - Returns: - Model object that is used to manage the model and log metadata to it. - - Examples: - - >>> import neptune - - Creating a new model: - - >>> model = neptune.init_model(key="PRE") - >>> model["metadata"] = some_metadata - - >>> # Or initialize with the constructor - ... model = Model(key="PRE") - - >>> # You can provide the project parameter as an environment variable - ... # or as an argument to the init_model() function: - ... model = neptune.init_model(key="PRE", project="workspace-name/project-name") - - >>> # When creating a model, you can give it a name: - ... model = neptune.init_model(key="PRE", name="Pre-trained model") - - Connecting to an existing model: - - >>> # Initialize existing model with identifier "CLS-PRE" - ... model = neptune.init_model(with_id="CLS-PRE") - - >>> # To prevent modifications when connecting to an existing model, you can connect in read-only mode - ... model = neptune.init_model(with_id="CLS-PRE", mode="read-only") - - Using the Model object as context manager: - - >>> with Model(key="PRE") as model: - ... 
model["metadata"] = some_metadata - - For details, see the docs: - Initializing a model: - https://docs.neptune.ai/api/neptune#init_model - Model class reference: - https://docs.neptune.ai/api/model - """ - - container_type = ContainerType.MODEL - - def __init__( - self, - with_id: Optional[str] = None, - *, - name: Optional[str] = None, - key: Optional[str] = None, - project: Optional[str] = None, - api_token: Optional[str] = None, - mode: Optional[Literal["async", "sync", "read-only", "debug"]] = None, - flush_period: float = DEFAULT_FLUSH_PERIOD, - proxies: Optional[dict] = None, - async_lag_callback: Optional[NeptuneObjectCallback] = None, - async_lag_threshold: float = ASYNC_LAG_THRESHOLD, - async_no_progress_callback: Optional[NeptuneObjectCallback] = None, - async_no_progress_threshold: float = ASYNC_NO_PROGRESS_THRESHOLD, - ): - # not yet supported by the backend - raise NeptuneUnsupportedFunctionalityException - - verify_type("with_id", with_id, (str, type(None))) - verify_type("name", name, (str, type(None))) - verify_type("key", key, (str, type(None))) - verify_type("project", project, (str, type(None))) - verify_type("mode", mode, (str, type(None))) - - self._key: Optional[str] = key - self._with_id: Optional[str] = with_id - self._name: Optional[str] = DEFAULT_NAME if with_id is None and name is None else name - - # make mode proper Enum instead of string - mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.ASYNC.value) - - if mode == Mode.OFFLINE: - raise NeptuneException("Model can't be initialized in OFFLINE mode") - - if mode == Mode.DEBUG: - project = OFFLINE_PROJECT_QUALIFIED_NAME - - super().__init__( - project=project, - api_token=api_token, - mode=mode, - flush_period=flush_period, - proxies=proxies, - async_lag_callback=async_lag_callback, - async_lag_threshold=async_lag_threshold, - async_no_progress_callback=async_no_progress_callback, - async_no_progress_threshold=async_no_progress_threshold, - ) - - def _get_or_create_api_object(self) -> ApiExperiment: - project_workspace = self._project_api_object.workspace - project_name = self._project_api_object.name - project_qualified_name = f"{project_workspace}/{project_name}" - - if self._with_id is not None: - # with_id (resume existing model) has priority over key (creating a new model) - # additional creation parameters (e.g. 
name) are simply ignored in this scenario - return self._backend.get_metadata_container( - container_id=QualifiedName(project_qualified_name + "/" + self._with_id), - expected_container_type=self.container_type, - ) - elif self._key is not None: - if self._mode == Mode.READ_ONLY: - raise NeedExistingModelForReadOnlyMode() - - try: - return self._backend.create_model(project_id=self._project_api_object.id, key=self._key) - except NeptuneObjectCreationConflict as e: - base_url = self._backend.get_display_address() - raise NeptuneModelKeyAlreadyExistsError( - model_key=self._key, - models_tab_url=f"{base_url}/{project_workspace}/{project_name}/models", - ) from e - else: - raise NeptuneMissingRequiredInitParameter( - parameter_name="key", - called_function="init_model", - ) - - def _get_background_jobs(self) -> List["BackgroundJob"]: - return [PingBackgroundJob()] - - def _write_initial_attributes(self): - if self._name is not None: - self[SYSTEM_NAME_ATTRIBUTE_PATH] = self._name - - def _raise_if_stopped(self): - if self._state == ContainerState.STOPPED: - raise InactiveModelException(label=self._sys_id) - - def get_url(self) -> str: - """Returns the URL that can be accessed within the browser""" - return self._backend.get_model_url( - model_id=self._id, - workspace=self._workspace, - project_name=self._project_name, - sys_id=self._sys_id, - ) - - def fetch_model_versions_table( - self, - *, - query: Optional[str] = None, - columns: Optional[Iterable[str]] = None, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - ) -> Table: - """Retrieve all versions of the given model. - - Args: - query: NQL query string. Syntax: https://docs.neptune.ai/usage/nql/ - Example: `"(model_size: float > 100) AND (backbone: string = VGG)"`. - columns: Names of columns to include in the table, as a list of field names. - The Neptune ID ("sys/id") is included automatically. - If `None` (default), all the columns of the model versions table are included, - up to a maximum of 10 000 columns. - limit: How many entries to return at most. If `None`, all entries are returned. - sort_by: Name of the field to sort the results by. - The field must represent a simple type (string, float, datetime, integer, or Boolean). - ascending: Whether to sort the entries in ascending order of the sorting column values. - progress_bar: Set to `False` to disable the download progress bar, - or pass a `ProgressBarCallback` class to use your own progress bar callback. - - Returns: - `Table` object containing `ModelVersion` objects that match the specified criteria. - - Use `to_pandas()` to convert it to a pandas DataFrame. - - Examples: - >>> import neptune - ... # Initialize model with the ID "CLS-FOREST" - ... model = neptune.init_model(with_id="CLS-FOREST") - ... # Fetch the metadata of all the model's versions as a pandas DataFrame - ... model_versions_df = model.fetch_model_versions_table().to_pandas() - - >>> # Include only the fields "params/lr" and "val/loss" as columns: - ... model_versions_df = model.fetch_model_versions_table(columns=["params/lr", "val/loss"]).to_pandas() - - >>> # Sort model versions by size (space they take up in Neptune) - ... model_versions_df = model.fetch_model_versions_table(sort_by="sys/size").to_pandas() - ... # Extract the ID of the largest model version object - ... largest_model_version_id = model_versions_df["sys/id"].values[0] - - >>> # Fetch model versions with VGG backbone - ... 
model_versions_df = model.fetch_model_versions_table( - ... query="(backbone: string = VGG)" - ... ).to_pandas() - - See also the API reference: - https://docs.neptune.ai/api/model/#fetch_model_versions_table - """ - verify_type("query", query, (str, type(None))) - verify_type("limit", limit, (int, type(None))) - verify_type("sort_by", sort_by, str) - verify_type("ascending", ascending, bool) - verify_type("progress_bar", progress_bar, (type(None), bool, type(ProgressBarCallback))) - - if isinstance(limit, int) and limit <= 0: - raise ValueError(f"Parameter 'limit' must be a positive integer or None. Got {limit}.") - - query = query if query is not None else "" - nql = build_raw_query(query=query, trashed=False) - nql = NQLQueryAggregate( - items=[ - nql, - NQLQueryAttribute( - name="sys/model_id", - value=self._sys_id, - operator=NQLAttributeOperator.EQUALS, - type=NQLAttributeType.STRING, - ), - ], - aggregator=NQLAggregator.AND, - ) - return NeptuneObject._fetch_entries( - self, - child_type=ContainerType.MODEL_VERSION, - query=nql, - columns=columns, - limit=limit, - sort_by=sort_by, - ascending=ascending, - progress_bar=progress_bar, - ) diff --git a/src/neptune/objects/model_version.py b/src/neptune/objects/model_version.py deleted file mode 100644 index 1e7f6e39c..000000000 --- a/src/neptune/objects/model_version.py +++ /dev/null @@ -1,299 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
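A point worth pulling out of fetch_model_versions_table() above: the caller's NQL query is always AND-ed with a `sys/model_id` condition, so the results are scoped to versions of this one model. A minimal usage sketch, reusing the hypothetical "ml-team/classification" project and "CLS-PRE" model from the docstring examples:

```
import neptune

# Hypothetical identifiers, for illustration only.
model = neptune.init_model(with_id="CLS-PRE", project="ml-team/classification", mode="read-only")

# The query below is combined internally with `sys/model_id = <this model>`,
# so versions of other models can never appear in the result.
versions_df = model.fetch_model_versions_table(
    query="(backbone: string = VGG)",
    sort_by="sys/creation_time",
    ascending=True,
).to_pandas()
print(versions_df["sys/id"].tolist())
model.stop()
```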
-# -__all__ = ["ModelVersion"] - -import os -from typing import ( - TYPE_CHECKING, - List, - Optional, -) - -from typing_extensions import Literal - -from neptune.attributes.constants import ( - SYSTEM_NAME_ATTRIBUTE_PATH, - SYSTEM_STAGE_ATTRIBUTE_PATH, -) -from neptune.envs import CONNECTION_MODE -from neptune.exceptions import ( - InactiveModelVersionException, - NeedExistingModelVersionForReadOnlyMode, - NeptuneMissingRequiredInitParameter, - NeptuneOfflineModeChangeStageException, - NeptuneUnsupportedFunctionalityException, -) -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneException -from neptune.internal.id_formats import QualifiedName -from neptune.internal.init.parameters import ( - ASYNC_LAG_THRESHOLD, - ASYNC_NO_PROGRESS_THRESHOLD, - DEFAULT_FLUSH_PERIOD, - DEFAULT_NAME, - OFFLINE_PROJECT_QUALIFIED_NAME, -) -from neptune.internal.operation_processors.offline_operation_processor import OfflineOperationProcessor -from neptune.internal.state import ContainerState -from neptune.internal.utils import verify_type -from neptune.internal.utils.ping_background_job import PingBackgroundJob -from neptune.objects.neptune_object import ( - NeptuneObject, - NeptuneObjectCallback, -) -from neptune.types.mode import Mode -from neptune.types.model_version_stage import ModelVersionStage - -if TYPE_CHECKING: - from neptune.internal.background_job import BackgroundJob - - -class ModelVersion(NeptuneObject): - """Initializes a ModelVersion object from an existing or new model version. - - Before creating model versions, you must first register a model by creating a Model object. - - A ModelVersion object is suitable for storing model metadata that is version-specific. It does not track - background metrics or logs automatically, but you can assign metadata to the model version just like you can - for runs. You can use the parent Model object to store metadata that is common to all versions of the model. - To learn more about model registry, see the docs: https://docs.neptune.ai/model_registry/overview/ - - To manage the stage of a model version, use its `change_stage()` method or use the menu in the web app. - - You can also use the ModelVersion object as a context manager (see examples). - - Args: - with_id: The Neptune identifier of an existing model version to resume, such as "CLS-PRE-3". - The identifier is stored in the model version's "sys/id" field. - If left empty, a new model version is created. - name: Custom name for the model version. You can add it as a column in the model versions table - ("sys/name"). You can also edit the name in the app, in the information view. - model: Identifier of the model for which the new version should be created. - Required when creating a new model version. - You can find the model ID in the leftmost column of the models table, or in a model's "sys/id" field. - project: Name of a project in the form `workspace-name/project-name`. - If None, the value of the NEPTUNE_PROJECT environment variable is used. - api_token: User's API token. - If left empty, the value of the NEPTUNE_API_TOKEN environment variable is used (recommended). - mode: Connection mode in which the tracking will work. - If None (default), the value of the NEPTUNE_MODE environment variable is used. - If no value was set for the environment variable, "async" is used by default. - Possible values are `async`, `sync`, `read-only`, and `debug`. 
- flush_period: In the asynchronous (default) connection mode, how often disk flushing is triggered - (in seconds). - proxies: Argument passed to HTTP calls made via the Requests library, as dictionary of strings. - For more information about proxies, see the Requests documentation. - async_lag_callback: Custom callback which is called if the lag between a queued operation and its - synchronization with the server exceeds the duration defined by `async_lag_threshold`. The callback - should take a ModelVersion object as the argument and can contain any custom code, such as calling - `stop()` on the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK` environment variable to `TRUE`. - async_lag_threshold: In seconds, duration between the queueing and synchronization of an operation. - If a lag callback (default callback enabled via environment variable or custom callback passed to the - `async_lag_callback` argument) is enabled, the callback is called when this duration is exceeded. - async_no_progress_callback: Custom callback which is called if there has been no synchronization progress - whatsoever for the duration defined by `async_no_progress_threshold`. The callback should take a - ModelVersion object as the argument and can contain any custom code, such as calling `stop()` on the - object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK` environment variable to `TRUE`. - async_no_progress_threshold: In seconds, for how long there has been no synchronization progress since the - object was initialized. If a no-progress callback (default callback enabled via environment variable or - custom callback passed to the `async_no_progress_callback` argument) is enabled, the callback is called - when this duration is exceeded. - - Returns: - ModelVersion object that is used to manage the model version and log metadata to it. - - Examples: - - >>> import neptune - - Creating a new model version: - - >>> # Create a new model version for a model with identifier "CLS-PRE" - ... model_version = neptune.init_model_version(model="CLS-PRE") - >>> model_version["your/structure"] = some_metadata - - >>> # You can provide the project parameter as an environment variable - ... # or directly in the init_model_version() function: - ... model_version = neptune.init_model_version( - ... model="CLS-PRE", - ... project="ml-team/classification", - ... ) - - >>> # Or initialize with the constructor: - ... model_version = ModelVersion(model="CLS-PRE") - - Connecting to an existing model version: - - >>> # Initialize an existing model version with identifier "CLS-PRE-12" - ... model_version = neptune.init_model_version(with_id="CLS-PRE-12") - - >>> # To prevent modifications when connecting to an existing model version, - ... # you can connect in read-only mode: - ... model_version = neptune.init_model_version(with_id="CLS-PRE-12", mode="read-only") - - Using the ModelVersion object as context manager: - - >>> with ModelVersion(model="CLS-PRE") as model_version: - ...
model_version["metadata"] = some_metadata - - For more, see the docs: - Initializing a model version: - https://docs.neptune.ai/api/neptune#init_model_version - ModelVersion class reference: - https://docs.neptune.ai/api/model_version/ - """ - - container_type = ContainerType.MODEL_VERSION - - def __init__( - self, - with_id: Optional[str] = None, - *, - name: Optional[str] = None, - model: Optional[str] = None, - project: Optional[str] = None, - api_token: Optional[str] = None, - mode: Optional[Literal["async", "sync", "read-only", "debug"]] = None, - flush_period: float = DEFAULT_FLUSH_PERIOD, - proxies: Optional[dict] = None, - async_lag_callback: Optional[NeptuneObjectCallback] = None, - async_lag_threshold: float = ASYNC_LAG_THRESHOLD, - async_no_progress_callback: Optional[NeptuneObjectCallback] = None, - async_no_progress_threshold: float = ASYNC_NO_PROGRESS_THRESHOLD, - ) -> None: - - raise NeptuneUnsupportedFunctionalityException - - verify_type("with_id", with_id, (str, type(None))) - verify_type("name", name, (str, type(None))) - verify_type("model", model, (str, type(None))) - verify_type("project", project, (str, type(None))) - verify_type("mode", mode, (str, type(None))) - - self._model: Optional[str] = model - self._with_id: Optional[str] = with_id - self._name: Optional[str] = DEFAULT_NAME if model is None and name is None else name - - # make mode proper Enum instead of string - mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.ASYNC.value) - - if mode == Mode.OFFLINE: - raise NeptuneException("ModelVersion can't be initialized in OFFLINE mode") - - if mode == Mode.DEBUG: - project = OFFLINE_PROJECT_QUALIFIED_NAME - - super().__init__( - project=project, - api_token=api_token, - mode=mode, - flush_period=flush_period, - proxies=proxies, - async_lag_callback=async_lag_callback, - async_lag_threshold=async_lag_threshold, - async_no_progress_callback=async_no_progress_callback, - async_no_progress_threshold=async_no_progress_threshold, - ) - - def _get_or_create_api_object(self) -> ApiExperiment: - project_workspace = self._project_api_object.workspace - project_name = self._project_api_object.name - project_qualified_name = f"{project_workspace}/{project_name}" - - if self._with_id is not None: - # with_id (resume existing model_version) has priority over model (creating a new model_version) - return self._backend.get_metadata_container( - container_id=QualifiedName(project_qualified_name + "/" + self._with_id), - expected_container_type=self.container_type, - ) - elif self._model is not None: - if self._mode == Mode.READ_ONLY: - raise NeedExistingModelVersionForReadOnlyMode() - - api_model = self._backend.get_metadata_container( - container_id=QualifiedName(project_qualified_name + "/" + self._model), - expected_container_type=ContainerType.MODEL, - ) - return self._backend.create_model_version(project_id=self._project_api_object.id, model_id=api_model.id) - else: - raise NeptuneMissingRequiredInitParameter( - parameter_name="model", - called_function="init_model_version", - ) - - def _get_background_jobs(self) -> List["BackgroundJob"]: - return [PingBackgroundJob()] - - def _write_initial_attributes(self): - if self._name is not None: - self[SYSTEM_NAME_ATTRIBUTE_PATH] = self._name - - def _raise_if_stopped(self): - if self._state == ContainerState.STOPPED: - raise InactiveModelVersionException(label=self._sys_id) - - def get_url(self) -> str: - """Returns the URL that can be accessed within the browser""" - return self._backend.get_model_version_url( - 
model_version_id=self._id, - workspace=self._workspace, - project_name=self._project_name, - sys_id=self._sys_id, - model_id=self["sys/model_id"].fetch(), - ) - - def change_stage(self, stage: str) -> None: - """Changes the stage of the model version. - - This method is always synchronous, which means that Neptune will wait for all other calls to reach the Neptune - servers before executing it. - Args: - stage: The new stage of the model version. - Possible values are `none`, `staging`, `production`, and `archived`. - Examples: - >>> import neptune - >>> model_version = neptune.init_model_version(with_id="CLS-TREE-3") - >>> # If the model version is good enough, promote it to staging - ... val_acc = model_version["validation/metrics/acc"].fetch() - >>> if val_acc >= ACC_THRESHOLD: - ... model_version.change_stage("staging") - - Learn more about stage management in the docs: - https://docs.neptune.ai/model_registry/managing_stage/ - API reference: - https://docs.neptune.ai/api/model_version/#change_stage - """ - mapped_stage = ModelVersionStage(stage) - - if isinstance(self._op_processor, OfflineOperationProcessor): - raise NeptuneOfflineModeChangeStageException() - - self.wait() - - with self.lock(): - attr = self.get_attribute(SYSTEM_STAGE_ATTRIBUTE_PATH) - # We are sure that such an attribute exists, because - # SYSTEM_STAGE_ATTRIBUTE_PATH is set by default on ModelVersion creation - assert attr is not None, f"No {SYSTEM_STAGE_ATTRIBUTE_PATH} found in model version" - attr.process_assignment( - value=mapped_stage.value, - wait=True, - ) diff --git a/src/neptune/objects/neptune_object.py b/src/neptune/objects/neptune_object.py deleted file mode 100644 index f5deed87b..000000000 --- a/src/neptune/objects/neptune_object.py +++ /dev/null @@ -1,695 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
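Because change_stage() above calls wait() and then assigns the stage with wait=True, the promotion is settled on the server before the method returns. A hedged sketch of a promotion gate built on that guarantee; the accuracy threshold is assumed, and "sys/stage" is taken to be the stage field path set at creation:

```
import neptune

model_version = neptune.init_model_version(with_id="CLS-TREE-3")  # hypothetical ID

val_acc = model_version["validation/metrics/acc"].fetch()
if val_acc >= 0.95:  # assumed quality threshold
    # Synchronous: queued operations are flushed and the assignment is
    # confirmed by the server before this call returns.
    model_version.change_stage("staging")
    print(model_version["sys/stage"].fetch())  # assumed field path; now reads "staging"
model_version.stop()
```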
-# -__all__ = ["NeptuneObject"] - -import abc -import atexit -import itertools -import logging -import os -import threading -import time -import traceback -from contextlib import AbstractContextManager -from functools import ( - partial, - wraps, -) -from queue import Queue -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Optional, - Union, -) - -from neptune.api.models import FieldType -from neptune.attributes import create_attribute_from_type -from neptune.attributes.attribute import Attribute -from neptune.attributes.namespace import Namespace as NamespaceAttr -from neptune.attributes.namespace import NamespaceBuilder -from neptune.envs import ( - NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK, - NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK, -) -from neptune.exceptions import MetadataInconsistency -from neptune.handler import Handler -from neptune.internal.backends.api_model import ( - ApiExperiment, - Project, -) -from neptune.internal.backends.factory import get_backend -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.backends.nql import NQLQuery -from neptune.internal.backends.project_name_lookup import project_name_lookup -from neptune.internal.backgroud_job_list import BackgroundJobList -from neptune.internal.background_job import BackgroundJob -from neptune.internal.container_structure import ContainerStructure -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import UNIX_STYLES -from neptune.internal.id_formats import ( - QualifiedName, - SysId, - UniqueId, - conform_optional, -) -from neptune.internal.init.parameters import ( - ASYNC_LAG_THRESHOLD, - ASYNC_NO_PROGRESS_THRESHOLD, - DEFAULT_FLUSH_PERIOD, -) -from neptune.internal.operation import DeleteAttribute -from neptune.internal.operation_processors.factory import get_operation_processor -from neptune.internal.operation_processors.lazy_operation_processor_wrapper import LazyOperationProcessorWrapper -from neptune.internal.operation_processors.operation_processor import OperationProcessor -from neptune.internal.signals_processing.background_job import CallbacksMonitor -from neptune.internal.state import ContainerState -from neptune.internal.utils import ( - verify_optional_callable, - verify_type, -) -from neptune.internal.utils.logger import ( - get_disabled_logger, - get_logger, -) -from neptune.internal.utils.paths import parse_path -from neptune.internal.utils.uncaught_exception_handler import instance as uncaught_exception_handler -from neptune.internal.utils.utils import reset_internal_ssl_state -from neptune.internal.value_to_attribute_visitor import ValueToAttributeVisitor -from neptune.internal.warnings import warn_about_unsupported_type -from neptune.table import Table -from neptune.types.mode import Mode -from neptune.types.type_casting import cast_value -from neptune.typing import ProgressBarType -from neptune.utils import stop_synchronization_callback - -if TYPE_CHECKING: - from neptune.internal.signals_processing.signals import Signal - - -NeptuneObjectCallback = Callable[["NeptuneObject"], None] - - -def ensure_not_stopped(fun): - @wraps(fun) - def inner_fun(self: "NeptuneObject", *args, **kwargs): - self._raise_if_stopped() - return fun(self, *args, **kwargs) - - return inner_fun - - -class NeptuneObject(AbstractContextManager): - container_type: ContainerType - - def __init__( - self, - *, - project: Optional[str] = None, - api_token: Optional[str] = None, - mode: Mode = Mode.ASYNC, - 
flush_period: float = DEFAULT_FLUSH_PERIOD, - proxies: Optional[dict] = None, - async_lag_callback: Optional[NeptuneObjectCallback] = None, - async_lag_threshold: float = ASYNC_LAG_THRESHOLD, - async_no_progress_callback: Optional[NeptuneObjectCallback] = None, - async_no_progress_threshold: float = ASYNC_NO_PROGRESS_THRESHOLD, - ): - verify_type("project", project, (str, type(None))) - verify_type("api_token", api_token, (str, type(None))) - verify_type("mode", mode, Mode) - verify_type("flush_period", flush_period, (int, float)) - verify_type("proxies", proxies, (dict, type(None))) - verify_type("async_lag_threshold", async_lag_threshold, (int, float)) - verify_optional_callable("async_lag_callback", async_lag_callback) - verify_type("async_no_progress_threshold", async_no_progress_threshold, (int, float)) - verify_optional_callable("async_no_progress_callback", async_no_progress_callback) - - self._mode: Mode = mode - self._flush_period = flush_period - self._lock: threading.RLock = threading.RLock() - self._forking_cond: threading.Condition = threading.Condition() - self._forking_state: bool = False - self._state: ContainerState = ContainerState.CREATED - self._signals_queue: "Queue[Signal]" = Queue() - self._logger: logging.Logger = get_logger() - - self._backend: NeptuneBackend = get_backend(mode=mode, api_token=api_token, proxies=proxies) - - self._project_qualified_name: Optional[str] = conform_optional(project, QualifiedName) - self._project_api_object: Project = project_name_lookup( - backend=self._backend, name=self._project_qualified_name - ) - self._project_id: UniqueId = self._project_api_object.id - - self._api_object: ApiExperiment = self._get_or_create_api_object() - self._id: UniqueId = self._api_object.id - self._sys_id: SysId = self._api_object.sys_id - self._workspace: str = self._api_object.workspace - self._project_name: str = self._api_object.project_name - - self._async_lag_threshold = async_lag_threshold - self._async_lag_callback = NeptuneObject._get_callback( - provided=async_lag_callback, - env_name=NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK, - ) - self._async_no_progress_threshold = async_no_progress_threshold - self._async_no_progress_callback = NeptuneObject._get_callback( - provided=async_no_progress_callback, - env_name=NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK, - ) - - self._op_processor: OperationProcessor = get_operation_processor( - mode=mode, - container_id=self._id, - container_type=self.container_type, - backend=self._backend, - lock=self._lock, - flush_period=flush_period, - queue=self._signals_queue, - ) - - self._bg_job: BackgroundJobList = self._prepare_background_jobs_if_non_read_only() - self._structure: ContainerStructure[Attribute, NamespaceAttr] = ContainerStructure(NamespaceBuilder(self)) - - if self._mode != Mode.OFFLINE: - self.sync(wait=False) - - if self._mode != Mode.READ_ONLY: - self._write_initial_attributes() - - self._startup(debug_mode=mode == Mode.DEBUG) - - try: - os.register_at_fork( - before=self._before_fork, - after_in_child=self._handle_fork_in_child, - after_in_parent=self._handle_fork_in_parent, - ) - except AttributeError: - pass - - """ - OpenSSL's internal random number generator does not properly handle forked processes. - Applications must change the PRNG state of the parent process if they use any SSL feature with os.fork(). - Any successful call of RAND_add(), RAND_bytes() or RAND_pseudo_bytes() is sufficient. 
- https://docs.python.org/3/library/ssl.html#multi-processing - - On Linux it looks like it does not help much but does not break anything either. - """ - - @staticmethod - def _get_callback(provided: Optional[NeptuneObjectCallback], env_name: str) -> Optional[NeptuneObjectCallback]: - if provided is not None: - return provided - if os.getenv(env_name, "") == "TRUE": - return stop_synchronization_callback - return None - - def _handle_fork_in_parent(self): - reset_internal_ssl_state() - if self._state == ContainerState.STARTED: - self._op_processor.resume() - self._bg_job.resume() - - with self._forking_cond: - self._forking_state = False - self._forking_cond.notify_all() - - def _handle_fork_in_child(self): - reset_internal_ssl_state() - self._logger = get_disabled_logger() - if self._state == ContainerState.STARTED: - self._op_processor.close() - self._signals_queue = Queue() - self._op_processor = LazyOperationProcessorWrapper( - operation_processor_getter=partial( - get_operation_processor, - mode=self._mode, - container_id=self._id, - container_type=self.container_type, - backend=self._backend, - lock=self._lock, - flush_period=self._flush_period, - queue=self._signals_queue, - ), - ) - # TODO: Every implementation of background job should handle fork by itself. - jobs = [] - if self._mode == Mode.ASYNC: - jobs.append( - CallbacksMonitor( - queue=self._signals_queue, - async_lag_threshold=self._async_lag_threshold, - async_no_progress_threshold=self._async_no_progress_threshold, - async_lag_callback=self._async_lag_callback, - async_no_progress_callback=self._async_no_progress_callback, - ) - ) - self._bg_job = BackgroundJobList(jobs) - - with self._forking_cond: - self._forking_state = False - self._forking_cond.notify_all() - - def _before_fork(self): - with self._forking_cond: - self._forking_cond.wait_for(lambda: self._state != ContainerState.STOPPING) - self._forking_state = True - - if self._state == ContainerState.STARTED: - self._bg_job.pause() - self._op_processor.pause() - - def _prepare_background_jobs_if_non_read_only(self) -> BackgroundJobList: - jobs = [] - - if self._mode != Mode.READ_ONLY: - jobs.extend(self._get_background_jobs()) - - if self._mode == Mode.ASYNC: - jobs.append( - CallbacksMonitor( - queue=self._signals_queue, - async_lag_threshold=self._async_lag_threshold, - async_no_progress_threshold=self._async_no_progress_threshold, - async_lag_callback=self._async_lag_callback, - async_no_progress_callback=self._async_no_progress_callback, - ) - ) - - return BackgroundJobList(jobs) - - @abc.abstractmethod - def _get_or_create_api_object(self) -> ApiExperiment: - raise NotImplementedError - - def _get_background_jobs(self) -> List["BackgroundJob"]: - return [] - - def _write_initial_attributes(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_tb is not None: - traceback.print_exception(exc_type, exc_val, exc_tb) - self.stop() - - def __getattr__(self, item): - raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{item}'") - - @abc.abstractmethod - def _raise_if_stopped(self): - raise NotImplementedError - - def _get_subpath_suggestions(self, path_prefix: str = None, limit: int = 1000) -> List[str]: - parsed_path = parse_path(path_prefix or "") - return list(itertools.islice(self._structure.iterate_subpaths(parsed_path), limit)) - - def _ipython_key_completions_(self): - return self._get_subpath_suggestions() - - @ensure_not_stopped - def __getitem__(self, path: str) -> "Handler": - return Handler(self, path) - - 
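The _get_callback() helper above encodes a simple precedence: an explicitly passed callable always wins, and the built-in stop_synchronization_callback is used only when the matching environment variable is exactly the string "TRUE". A sketch of both paths, assuming a hypothetical user-defined callback named alert_and_stop:

```
import os
import neptune

# Path 1: an explicit callback takes precedence over any environment setting.
def alert_and_stop(neptune_object):
    print(f"Synchronization is lagging, stopping {neptune_object.get_url()}")
    neptune_object.stop()

run = neptune.init_run(async_lag_callback=alert_and_stop, async_lag_threshold=300.0)

# Path 2: no callback passed; opt in to the default callback instead.
os.environ["NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK"] = "TRUE"  # must be exactly "TRUE"
other_run = neptune.init_run(async_lag_threshold=300.0)
```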
@ensure_not_stopped - def __setitem__(self, key: str, value) -> None: - self.__getitem__(key).assign(value) - - @ensure_not_stopped - def __delitem__(self, path) -> None: - self.pop(path) - - @ensure_not_stopped - def assign(self, value, *, wait: bool = False) -> None: - """Assigns values to multiple fields from a dictionary. - - You can use this method to quickly log all parameters at once. - - Args: - value (dict): A dictionary with values to assign, where keys become paths of the fields. - The dictionary can be nested, in which case the path will be a combination of all the keys. - wait: If `True`, Neptune waits to send all tracked metadata to the server before executing the call. - - Examples: - >>> import neptune - >>> run = neptune.init_run() - >>> # Assign a single value with the Python "=" operator - >>> run["parameters/learning_rate"] = 0.8 - >>> # or the assign() method - >>> run["parameters/learning_rate"].assign(0.8) - >>> # Assign a dictionary with the Python "=" operator - >>> run["parameters"] = {"max_epochs": 10, "optimizer": "Adam", "learning_rate": 0.8} - >>> # or the assign() method - >>> run.assign({"parameters": {"max_epochs": 10, "optimizer": "Adam", "learning_rate": 0.8}}) - - When operating on a handler object, you can use assign() to circumvent normal Python variable assignment. - >>> params = run["params"] - >>> params.assign({"max_epochs": 10, "optimizer": "Adam", "learning_rate": 0.8}) - - See also the API reference: - https://docs.neptune.ai/api/universal/#assign - """ - self._get_root_handler().assign(value, wait=wait) - - @ensure_not_stopped - def fetch(self) -> dict: - """Fetch values of all non-File Atom fields as a dictionary. - - You can use this method to retrieve metadata from a started or resumed run. - The result preserves the hierarchical structure of the run's metadata, but only contains Atom fields. - This means fields that contain single values, as opposed to series, files, or sets. - - Returns: - `dict` containing the values of all non-File Atom fields. - - Examples: - Resuming an existing run and fetching metadata from it: - >>> import neptune - >>> resumed_run = neptune.init_run(with_id="CLS-3") - >>> params = resumed_run["model/parameters"].fetch() - >>> run_data = resumed_run.fetch() - >>> print(run_data) - >>> # prints all Atom attributes stored in the run as a dict - - Fetching metadata from an existing model version: - >>> model_version = neptune.init_model_version(with_id="CLS-TREE-45") - >>> optimizer = model_version["parameters/optimizer"].fetch() - - See also the API reference: - https://docs.neptune.ai/api/universal#fetch - """ - return self._get_root_handler().fetch() - - def ping(self): - self._backend.ping(self._id, self.container_type) - - def start(self): - atexit.register(self._shutdown_hook) - self._op_processor.start() - self._bg_job.start(self) - self._state = ContainerState.STARTED - - def stop(self, *, seconds: Optional[Union[float, int]] = None) -> None: - """Stops the connection and ends the synchronization thread. - - You should stop any initialized runs or other objects when the connection to them is no longer needed. - - This method is automatically called: - - when the script that created the run or other object finishes execution. - - if using a context manager, when exiting the context. - - Note: In interactive sessions, such as Jupyter Notebook, objects are stopped automatically only when - the Python kernel stops.
However, background monitoring of system metrics and standard streams is disabled - unless explicitly enabled when initializing Neptune. - - Args: - seconds: Seconds to wait for all metadata tracking calls to finish before stopping the object. - If `None`, waits for all tracking calls to finish. - - Example: - >>> import neptune - >>> run = neptune.init_run() - >>> # Your training or monitoring code - >>> run.stop() - - See also the docs: - Best practices - Stopping objects - https://docs.neptune.ai/usage/best_practices/#stopping-runs-and-other-objects - API reference: - https://docs.neptune.ai/api/universal/#stop - """ - verify_type("seconds", seconds, (float, int, type(None))) - if self._state != ContainerState.STARTED: - return - - with self._forking_cond: - self._forking_cond.wait_for(lambda: not self._forking_state) - self._state = ContainerState.STOPPING - - ts = time.time() - self._logger.info("Shutting down background jobs, please wait a moment...") - self._bg_job.stop() - self._bg_job.join(seconds) - self._logger.info("Done!") - - sec_left = None if seconds is None else seconds - (time.time() - ts) - self._op_processor.stop(sec_left) - - if self._mode not in {Mode.OFFLINE, Mode.DEBUG}: - metadata_url = self.get_url().rstrip("/") + "/metadata" - self._logger.info(f"Explore the metadata in the Neptune app: {metadata_url}") - self._backend.close() - - with self._forking_cond: - self._state = ContainerState.STOPPED - self._forking_cond.notify_all() - - def get_state(self) -> str: - """Returns the current state of the container as a string. - - Examples: - >>> from neptune import init_run - >>> run = init_run() - >>> run.get_state() - 'started' - >>> run.stop() - >>> run.get_state() - 'stopped' - """ - return self._state.value - - def get_structure(self) -> Dict[str, Any]: - """Returns the object's metadata structure as a dictionary. - - This method can be used to programmatically traverse the metadata structure of a run, model, - or project object when using Neptune in automated workflows. - - Note: The returned object is a deep copy of the structure of the internal object. - - See also the API reference: - https://docs.neptune.ai/api/universal/#get_structure - """ - return self._structure.get_structure().to_dict() - - def print_structure(self) -> None: - """Pretty-prints the structure of the object's metadata. - - Paths are ordered lexicographically and the whole structure is neatly colored. 
- - See also: https://docs.neptune.ai/api/universal/#print_structure - """ - self._print_structure_impl(self.get_structure(), indent=0) - - def _print_structure_impl(self, struct: dict, indent: int) -> None: - for key in sorted(struct.keys()): - print(" " * indent, end="") - if isinstance(struct[key], dict): - print("{blue}'{key}'{end}:".format(blue=UNIX_STYLES["blue"], key=key, end=UNIX_STYLES["end"])) - self._print_structure_impl(struct[key], indent=indent + 1) - else: - print( - "{blue}'{key}'{end}: {type}".format( - blue=UNIX_STYLES["blue"], - key=key, - end=UNIX_STYLES["end"], - type=type(struct[key]).__name__, - ) - ) - - def define( - self, - path: str, - value: Any, - *, - wait: bool = False, - ) -> Optional[Attribute]: - with self._lock: - old_attr = self.get_attribute(path) - if old_attr is not None: - raise MetadataInconsistency("Attribute or namespace {} is already defined".format(path)) - - neptune_value = cast_value(value) - if neptune_value is None: - warn_about_unsupported_type(type_str=str(type(value))) - return None - - attr = ValueToAttributeVisitor(self, parse_path(path)).visit(neptune_value) - self.set_attribute(path, attr) - attr.process_assignment(neptune_value, wait=wait) - return attr - - def get_attribute(self, path: str) -> Optional[Attribute]: - with self._lock: - return self._structure.get(parse_path(path)) - - def set_attribute(self, path: str, attribute: Attribute) -> Optional[Attribute]: - with self._lock: - return self._structure.set(parse_path(path), attribute) - - def exists(self, path: str) -> bool: - """Checks if there is a field or namespace under the specified path.""" - verify_type("path", path, str) - return self.get_attribute(path) is not None - - @ensure_not_stopped - def pop(self, path: str, *, wait: bool = False) -> None: - """Removes the field stored under the path and all data associated with it. - - Args: - path: Path of the field to be removed. - wait: If `True`, Neptune waits to send all tracked metadata to the server before executing the call. - - Examples: - >>> import neptune - >>> run = neptune.init_run() - >>> run["parameters/learninggg_rata"] = 0.3 - >>> # Let's delete that misspelled field along with its data - ... run.pop("parameters/learninggg_rata") - >>> run["parameters/learning_rate"] = 0.3 - >>> # Training finished - ... run["trained_model"].upload("model.pt") - >>> # "model_checkpoint" is a File field - ... run.pop("model_checkpoint") - - See also the API reference: - https://docs.neptune.ai/api/universal/#pop - """ - verify_type("path", path, str) - self._get_root_handler().pop(path, wait=wait) - - def _pop_impl(self, parsed_path: List[str], *, wait: bool): - self._structure.pop(parsed_path) - self._op_processor.enqueue_operation(DeleteAttribute(parsed_path), wait=wait) - - def lock(self) -> threading.RLock: - return self._lock - - def wait(self, *, disk_only=False) -> None: - """Wait for all the queued metadata tracking calls to reach the Neptune servers. - - Args: - disk_only: If `True`, the process will only wait for data to be saved - locally from memory, but will not wait for them to reach Neptune servers. - - See also the API reference: - https://docs.neptune.ai/api/universal/#wait - """ - with self._lock: - if disk_only: - self._op_processor.flush() - else: - self._op_processor.wait() - - def sync(self, *, wait: bool = True) -> None: - """Synchronizes the local representation of the object with the representation on the Neptune servers. 
- - Args: - wait: If `True` (default), Neptune first waits for all queued metadata tracking calls to reach the - servers, so that operations still queued locally are reflected in the synchronized structure. - - Example: - >>> import neptune - >>> # Connect to a run from Worker #3 - ... worker_id = 3 - >>> run = neptune.init_run(with_id="DIST-43", monitoring_namespace=f"monitoring/{worker_id}") - >>> # Try to access logs that were created in the meantime by Worker #2 - ... worker_2_status = run["status/2"].fetch() - ... # Error if this field was created after this script started - >>> run.sync() # Synchronizes local representation with Neptune servers - >>> worker_2_status = run["status/2"].fetch() - ... # No error - - See also the API reference: - https://docs.neptune.ai/api/universal/#sync - """ - with self._lock: - if wait: - self._op_processor.wait() - attributes = self._backend.get_attributes(self._id, self.container_type) - self._structure.clear() - for attribute in attributes: - self._define_attribute(parse_path(attribute.path), attribute.type) - - def _define_attribute(self, _path: List[str], _type: FieldType): - attr = create_attribute_from_type(_type, self, _path) - self._structure.set(_path, attr) - - def _get_root_handler(self): - return Handler(self, "") - - @abc.abstractmethod - def get_url(self) -> str: - """Returns a link to the object in the Neptune app. - - The same link is printed in the console once the object has been initialized. - - API reference: https://docs.neptune.ai/api/universal/#get_url - """ - ... - - def _startup(self, debug_mode): - if not debug_mode: - self._logger.info(f"Neptune initialized. Open in the app: {self.get_url()}") - - self.start() - - uncaught_exception_handler.activate() - - def _shutdown_hook(self): - self.stop() - - def _fetch_entries( - self, - child_type: ContainerType, - query: NQLQuery, - columns: Optional[Iterable[str]], - limit: Optional[int], - sort_by: str, - ascending: bool, - progress_bar: Optional[ProgressBarType], - ) -> Table: - if columns is not None: - # always return entries with 'sys/id' and the column chosen for sorting when a filter is applied - columns = set(columns) - columns.add("sys/id") - columns.add(sort_by) - - leaderboard_entries = self._backend.search_leaderboard_entries( - project_id=self._project_id, - types=[child_type], - query=query, - columns=columns, - limit=limit, - sort_by=sort_by, - ascending=ascending, - progress_bar=progress_bar, - ) - - return Table( - backend=self._backend, - container_type=child_type, - entries=leaderboard_entries, - ) - - def get_root_object(self) -> "NeptuneObject": - """Returns the same Neptune object.""" - return self diff --git a/src/neptune/objects/project.py b/src/neptune/objects/project.py deleted file mode 100644 index d6efdd924..000000000 --- a/src/neptune/objects/project.py +++ /dev/null @@ -1,422 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
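One detail of _fetch_entries() above is easy to miss: when a column list is given, it is widened to always include "sys/id" and the sort column, so table consumers can rely on both being present. A sketch of the observable effect, using the hypothetical "jackie/sandbox" project from the docstring examples:

```
import neptune

project = neptune.init_project(project="jackie/sandbox", mode="read-only")

# Only "train/loss" is requested explicitly, but "sys/id" and the sort
# column "sys/creation_time" are added to the selection internally.
runs_df = project.fetch_runs_table(
    columns=["train/loss"],
    sort_by="sys/creation_time",
).to_pandas()
assert {"sys/id", "sys/creation_time", "train/loss"} <= set(runs_df.columns)
project.stop()
```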
-# -__all__ = ["Project"] - -import os -from typing import ( - Iterable, - Optional, - Union, -) - -from typing_extensions import Literal - -from neptune.envs import CONNECTION_MODE -from neptune.exceptions import ( - InactiveProjectException, - NeptuneUnsupportedFunctionalityException, -) -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.container_type import ContainerType -from neptune.internal.exceptions import NeptuneException -from neptune.internal.init.parameters import ( - ASYNC_LAG_THRESHOLD, - ASYNC_NO_PROGRESS_THRESHOLD, - DEFAULT_FLUSH_PERIOD, -) -from neptune.internal.state import ContainerState -from neptune.internal.utils import ( - as_list, - verify_collection_type, - verify_type, - verify_value, -) -from neptune.objects.neptune_object import ( - NeptuneObject, - NeptuneObjectCallback, -) -from neptune.objects.utils import ( - build_raw_query, - prepare_nql_query, -) -from neptune.table import Table -from neptune.types.mode import Mode -from neptune.typing import ( - ProgressBarCallback, - ProgressBarType, -) - - -class Project(NeptuneObject): - """Starts a connection to an existing Neptune project. - - You can use the Project object to retrieve information about runs, models, and model versions - within the project. - - You can also log (and fetch) metadata common to the whole project, such as information about datasets, - links to documents, or key project metrics. - - Note: If you want to instead create a project, use the - [`management.create_project()`](https://docs.neptune.ai/api/management/#create_project) function. - - You can also use the Project object as a context manager (see examples). - - Args: - project: Name of a project in the form `workspace-name/project-name`. - If left empty, the value of the NEPTUNE_PROJECT environment variable is used. - api_token: User's API token. - If left empty, the value of the NEPTUNE_API_TOKEN environment variable is used (recommended). - mode: Connection mode in which the tracking will work. - If left empty, the value of the NEPTUNE_MODE environment variable is used. - If no value was set for the environment variable, "async" is used by default. - Possible values are `async`, `sync`, `read-only`, and `debug`. - flush_period: In the asynchronous (default) connection mode, how often disk flushing is triggered. - Defaults to 5 (every 5 seconds). - proxies: Argument passed to HTTP calls made via the Requests library, as dictionary of strings. - For more information about proxies, see the Requests documentation. - async_lag_callback: Custom callback which is called if the lag between a queued operation and its - synchronization with the server exceeds the duration defined by `async_lag_threshold`. The callback - should take a Project object as the argument and can contain any custom code, such as calling `stop()` - on the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK` environment variable to `TRUE`. - async_lag_threshold: In seconds, duration between the queueing and synchronization of an operation. - If a lag callback (default callback enabled via environment variable or custom callback passed to the - `async_lag_callback` argument) is enabled, the callback is called when this duration is exceeded. - async_no_progress_callback: Custom callback which is called if there has been no synchronization progress - whatsoever for the duration defined by `async_no_progress_threshold`. 
The callback - should take a Project object as the argument and can contain any custom code, such as calling `stop()` - on the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK` environment variable to `TRUE`. - async_no_progress_threshold: In seconds, for how long there has been no synchronization progress since the - object was initialized. If a no-progress callback (default callback enabled via environment variable or - custom callback passed to the `async_no_progress_callback` argument) is enabled, the callback is called - when this duration is exceeded. - - Returns: - Project object that can be used to interact with the project as a whole, - like logging or fetching project-level metadata. - - Examples: - - >>> import neptune - - >>> # Connect to the project "classification" in the workspace "ml-team": - ... project = neptune.init_project(project="ml-team/classification") - - >>> # Or initialize with the constructor - ... project = Project(project="ml-team/classification") - - >>> # Connect to a project in read-only mode: - ... project = neptune.init_project( - ... project="ml-team/classification", - ... mode="read-only", - ... ) - - Using the Project object as context manager: - - >>> with Project(project="ml-team/classification") as project: - ... project["metadata"] = some_metadata - - For more, see the docs: - Initializing a project: - https://docs.neptune.ai/api/neptune#init_project - Project class reference: - https://docs.neptune.ai/api/project/ - """ - - container_type = ContainerType.PROJECT - - def __init__( - self, - project: Optional[str] = None, - *, - api_token: Optional[str] = None, - mode: Optional[Literal["async", "sync", "read-only", "debug"]] = None, - flush_period: float = DEFAULT_FLUSH_PERIOD, - proxies: Optional[dict] = None, - async_lag_callback: Optional[NeptuneObjectCallback] = None, - async_lag_threshold: float = ASYNC_LAG_THRESHOLD, - async_no_progress_callback: Optional[NeptuneObjectCallback] = None, - async_no_progress_threshold: float = ASYNC_NO_PROGRESS_THRESHOLD, - ): - if mode in {Mode.ASYNC.value, Mode.SYNC.value}: - raise NeptuneUnsupportedFunctionalityException - - verify_type("mode", mode, (str, type(None))) - - # make mode proper Enum instead of string - mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.ASYNC.value) - - if mode == Mode.OFFLINE: - raise NeptuneException("Project can't be initialized in OFFLINE mode") - - super().__init__( - project=project, - api_token=api_token, - mode=mode, - flush_period=flush_period, - proxies=proxies, - async_lag_callback=async_lag_callback, - async_lag_threshold=async_lag_threshold, - async_no_progress_callback=async_no_progress_callback, - async_no_progress_threshold=async_no_progress_threshold, - ) - - def _get_or_create_api_object(self) -> ApiExperiment: - return ApiExperiment( - id=self._project_api_object.id, - type=ContainerType.PROJECT, - sys_id=self._project_api_object.sys_id, - workspace=self._project_api_object.workspace, - project_name=self._project_api_object.name, - ) - - def _raise_if_stopped(self): - if self._state == ContainerState.STOPPED: - raise InactiveProjectException(label=f"{self._workspace}/{self._project_name}") - - def get_url(self) -> str: - """Returns the URL that can be accessed within the browser""" - return self._backend.get_project_url( - project_id=self._id, - workspace=self._workspace, - project_name=self._project_name, - ) - - def fetch_runs_table( - self, - *, - 
query: Optional[str] = None, - id: Optional[Union[str, Iterable[str]]] = None, - state: Optional[Union[Literal["inactive", "active"], Iterable[Literal["inactive", "active"]]]] = None, - owner: Optional[Union[str, Iterable[str]]] = None, - tag: Optional[Union[str, Iterable[str]]] = None, - columns: Optional[Iterable[str]] = None, - trashed: Optional[bool] = False, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - ) -> Table: - """Retrieve runs matching the specified criteria. - - All parameters are optional. Each of them specifies a single criterion. - Only runs matching all of the criteria will be returned. - - Args: - query: NQL query string. Syntax: https://docs.neptune.ai/usage/nql/ - Example: `"(accuracy: float > 0.88) AND (loss: float < 0.2)"`. - Exclusive with the `id`, `state`, `owner`, and `tag` parameters. - id: Neptune ID of a run, or list of several IDs. - Example: `"SAN-1"` or `["SAN-1", "SAN-2"]`. - Matching any element of the list is sufficient to pass the criterion. - state: Run state, or list of states. - Example: `"active"`. - Possible values: `"inactive"`, `"active"`. - Matching any element of the list is sufficient to pass the criterion. - owner: Username of the run owner, or a list of owners. - Example: `"josh"` or `["frederic", "josh"]`. - The owner is the user who created the run. - Matching any element of the list is sufficient to pass the criterion. - tag: A tag or list of tags applied to the run. - Example: `"lightGBM"` or `["pytorch", "cycleLR"]`. - Only runs that have all specified tags will match this criterion. - columns: Names of columns to include in the table, as a list of field names. - The Neptune ID ("sys/id") is included automatically. - If `None` (default), all the columns of the runs table are included, up to a maximum of 10 000 columns. - trashed: Whether to retrieve trashed runs. - If `True`, only trashed runs are retrieved. - If `False` (default), only not-trashed runs are retrieved. - If `None`, both trashed and not-trashed runs are retrieved. - limit: How many entries to return at most. If `None`, all entries are returned. - sort_by: Name of the field to sort the results by. - The field must represent a simple type (string, float, datetime, integer, or Boolean). - ascending: Whether to sort the entries in ascending order of the sorting column values. - progress_bar: Set to `False` to disable the download progress bar, - or pass a `ProgressBarCallback` class to use your own progress bar callback. - - Returns: - `Table` object containing `Run` objects matching the specified criteria. - - Use `to_pandas()` to convert the table to a pandas DataFrame. - - Examples: - >>> import neptune - ... # Fetch project "jackie/sandbox" - ... project = neptune.init_project(mode="read-only", project="jackie/sandbox") - - >>> # Fetch the metadata of all runs as a pandas DataFrame - ... runs_table_df = project.fetch_runs_table().to_pandas() - ... # Extract the ID of the last run - ... last_run_id = runs_table_df["sys/id"].values[0] - - >>> # Fetch the 100 oldest runs - ... runs_table_df = project.fetch_runs_table( - ... sort_by="sys/creation_time", ascending=True, limit=100 - ... ).to_pandas() - - >>> # Fetch the 100 largest runs (space they take up in Neptune) - ... runs_table_df = project.fetch_runs_table(sort_by="sys/size", limit=100).to_pandas() - - >>> # Include only the fields "train/loss" and "params/lr" as columns: - ... 
runs_table_df = project.fetch_runs_table(columns=["params/lr", "train/loss"]).to_pandas() - - >>> # Pass a custom progress bar callback - ... runs_table_df = project.fetch_runs_table(progress_bar=MyProgressBar).to_pandas() - ... # The class MyProgressBar(ProgressBarCallback) must be defined - - You can also filter the runs table by state, owner, tag, or a combination of these: - - >>> # Fetch only inactive runs - ... runs_table_df = project.fetch_runs_table(state="inactive").to_pandas() - - >>> # Fetch only runs created by CI service - ... runs_table_df = project.fetch_runs_table(owner="my_company_ci_service").to_pandas() - - >>> # Fetch only runs that have both "Exploration" and "Optuna" tags - ... runs_table_df = project.fetch_runs_table(tag=["Exploration", "Optuna"]).to_pandas() - - >>> # You can combine conditions. Runs satisfying all conditions will be fetched - ... runs_table_df = project.fetch_runs_table(state="inactive", tag="Exploration").to_pandas() - - See also the API reference in the docs: - https://docs.neptune.ai/api/project#fetch_runs_table - """ - - if any((id, state, owner, tag)) and query is not None: - raise ValueError( - "You can't use the 'query' parameter together with the 'id', 'state', 'owner', or 'tag' parameters." - ) - - ids = as_list("id", id) - states = as_list("state", state) - owners = as_list("owner", owner) - tags = as_list("tag", tag) - - verify_type("query", query, (str, type(None))) - verify_type("trashed", trashed, (bool, type(None))) - verify_type("limit", limit, (int, type(None))) - verify_type("sort_by", sort_by, str) - verify_type("ascending", ascending, bool) - verify_type("progress_bar", progress_bar, (type(None), bool, type(ProgressBarCallback))) - verify_collection_type("state", states, str) - - if isinstance(limit, int) and limit <= 0: - raise ValueError(f"Parameter 'limit' must be a positive integer or None. Got {limit}.") - - for state in states: - verify_value("state", state.lower(), ("inactive", "active")) - - if query is not None: - nql_query = build_raw_query(query, trashed=trashed) - else: - nql_query = prepare_nql_query(ids, states, owners, tags, trashed) - - return NeptuneObject._fetch_entries( - self, - child_type=ContainerType.RUN, - query=nql_query, - columns=columns, - limit=limit, - sort_by=sort_by, - ascending=ascending, - progress_bar=progress_bar, - ) - - def fetch_models_table( - self, - *, - query: Optional[str] = None, - columns: Optional[Iterable[str]] = None, - trashed: Optional[bool] = False, - limit: Optional[int] = None, - sort_by: str = "sys/creation_time", - ascending: bool = False, - progress_bar: Optional[ProgressBarType] = None, - ) -> Table: - """Retrieve models stored in the project. - - Args: - query: NQL query string. Syntax: https://docs.neptune.ai/usage/nql/ - Example: `"(model_size: float > 100) AND (backbone: string = VGG)"`. - trashed: Whether to retrieve trashed models. - If `True`, only trashed models are retrieved. - If `False`, only not-trashed models are retrieved. - If `None`, both trashed and not-trashed models are retrieved. - columns: Names of columns to include in the table, as a list of field names. - The Neptune ID ("sys/id") is included automatically. - If `None`, all the columns of the models table are included, up to a maximum of 10 000 columns. - limit: How many entries to return at most. If `None`, all entries are returned. - sort_by: Name of the field to sort the results by. - The field must represent a simple type (string, float, datetime, integer, or Boolean). 
- ascending: Whether to sort the entries in ascending order of the sorting column values. - progress_bar: Set to `False` to disable the download progress bar, - or pass a `ProgressBarCallback` class to use your own progress bar callback. - - Returns: - `Table` object containing `Model` objects. - - Use `to_pandas()` to convert the table to a pandas DataFrame. - - Examples: - >>> import neptune - ... # Fetch project "jackie/sandbox" - ... project = neptune.init_project(mode="read-only", project="jackie/sandbox") - - >>> # Fetch the metadata of all models as a pandas DataFrame - ... models_table_df = project.fetch_models_table().to_pandas() - - >>> # Include only the fields "params/lr" and "info/size" as columns: - ... models_table_df = project.fetch_models_table(columns=["params/lr", "info/size"]).to_pandas() - - >>> # Fetch 10 oldest model objects - ... models_table_df = project.fetch_models_table( - ... sort_by="sys/creation_time", ascending=True, limit=10 - ... ).to_pandas() - ... # Extract the ID of the first listed (oldest) model object - ... oldest_model_id = models_table_df["sys/id"].values[0] - - >>> # Fetch models with VGG backbone - ... models_table_df = project.fetch_models_table( - ... query="(backbone: string = VGG)" - ... ).to_pandas() - - See also the API reference in the docs: - https://docs.neptune.ai/api/project#fetch_models_table - """ - verify_type("query", query, (str, type(None))) - verify_type("limit", limit, (int, type(None))) - verify_type("sort_by", sort_by, str) - verify_type("ascending", ascending, bool) - verify_type("progress_bar", progress_bar, (type(None), bool, type(ProgressBarCallback))) - - if isinstance(limit, int) and limit <= 0: - raise ValueError(f"Parameter 'limit' must be a positive integer or None. Got {limit}.") - - query = query if query is not None else "" - nql = build_raw_query(query=query, trashed=trashed) - return NeptuneObject._fetch_entries( - self, - child_type=ContainerType.MODEL, - query=nql, - columns=columns, - limit=limit, - sort_by=sort_by, - ascending=ascending, - progress_bar=progress_bar, - ) diff --git a/src/neptune/objects/run.py b/src/neptune/objects/run.py deleted file mode 100644 index eef2727b1..000000000 --- a/src/neptune/objects/run.py +++ /dev/null @@ -1,593 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
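Taken together, fetch_models_table() and Model.fetch_model_versions_table() let you walk the whole registry from a single Project handle. A hedged sketch of that traversal, again against the hypothetical "jackie/sandbox" project:

```
import neptune

project = neptune.init_project(project="jackie/sandbox", mode="read-only")

models_df = project.fetch_models_table(columns=["sys/id"]).to_pandas()
for model_id in models_df["sys/id"]:
    model = neptune.init_model(with_id=model_id, mode="read-only")
    # Default sort is sys/creation_time descending, so this is the 5 newest versions.
    versions_df = model.fetch_model_versions_table(limit=5).to_pandas()
    print(model_id, "->", len(versions_df), "recent versions")
    model.stop()
project.stop()
```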
-# -__all__ = ["Run"] - -import os -import threading -from platform import node as get_hostname -from typing import ( - TYPE_CHECKING, - Callable, - List, - Optional, - Tuple, - TypeVar, - Union, -) - -from typing_extensions import Literal - -from neptune.attributes.constants import ( - SYSTEM_DESCRIPTION_ATTRIBUTE_PATH, - SYSTEM_FAILED_ATTRIBUTE_PATH, - SYSTEM_HOSTNAME_ATTRIBUTE_PATH, - SYSTEM_NAME_ATTRIBUTE_PATH, - SYSTEM_TAGS_ATTRIBUTE_PATH, -) -from neptune.envs import ( - CONNECTION_MODE, - CUSTOM_RUN_ID_ENV_NAME, - MONITORING_NAMESPACE, - NEPTUNE_NOTEBOOK_ID, - NEPTUNE_NOTEBOOK_PATH, -) -from neptune.exceptions import ( - InactiveRunException, - NeedExistingRunForReadOnlyMode, - NeptuneRunResumeAndCustomIdCollision, -) -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.container_type import ContainerType -from neptune.internal.hardware.hardware_metric_reporting_job import HardwareMetricReportingJob -from neptune.internal.id_formats import QualifiedName -from neptune.internal.init.parameters import ( - ASYNC_LAG_THRESHOLD, - ASYNC_NO_PROGRESS_THRESHOLD, - DEFAULT_FLUSH_PERIOD, - DEFAULT_NAME, - OFFLINE_PROJECT_QUALIFIED_NAME, -) -from neptune.internal.notebooks.notebooks import create_checkpoint -from neptune.internal.state import ContainerState -from neptune.internal.streams.std_capture_background_job import ( - StderrCaptureBackgroundJob, - StdoutCaptureBackgroundJob, -) -from neptune.internal.utils import ( - verify_collection_type, - verify_type, -) -from neptune.internal.utils.dependency_tracking import ( - FileDependenciesStrategy, - InferDependenciesStrategy, -) -from neptune.internal.utils.git import ( - to_git_info, - track_uncommitted_changes, -) -from neptune.internal.utils.hashing import generate_hash -from neptune.internal.utils.limits import custom_run_id_exceeds_length -from neptune.internal.utils.ping_background_job import PingBackgroundJob -from neptune.internal.utils.runningmode import ( - in_interactive, - in_notebook, -) -from neptune.internal.utils.source_code import upload_source_code -from neptune.internal.utils.traceback_job import TracebackJob -from neptune.internal.warnings import ( - NeptuneWarning, - warn_once, -) -from neptune.internal.websockets.websocket_signals_background_job import WebsocketSignalsBackgroundJob -from neptune.objects.neptune_object import ( - NeptuneObject, - NeptuneObjectCallback, -) -from neptune.types import ( - GitRef, - StringSeries, -) -from neptune.types.atoms.git_ref import GitRefDisabled -from neptune.types.mode import Mode - -if TYPE_CHECKING: - from neptune.internal.background_job import BackgroundJob - - -T = TypeVar("T") - - -def temporarily_disabled(func: Callable[..., T]) -> Callable[..., T]: - def wrapper(*_, **__): - if func.__name__ == "_get_background_jobs": - return [] - elif func.__name__ == "_write_initial_attributes": - return None - elif func.__name__ == "_write_initial_monitoring_attributes": - return None - - return wrapper - - -class Run(NeptuneObject): - """Starts a new tracked run that logs ML model-building metadata to neptune.ai. - - You can log metadata by assigning it to the initialized Run object: - - ``` - run = neptune.init_run() - run["your/structure"] = some_metadata - ``` - - Examples of metadata you can log: metrics, losses, scores, artifact versions, images, predictions, - model weights, parameters, checkpoints, and interactive visualizations. 
- - By default, the run automatically tracks hardware consumption, stdout/stderr, source code, and Git information. - If you're using Neptune in an interactive session, however, some background monitoring needs to be enabled - explicitly. - - If you provide the ID of an existing run, that run is resumed and no new run is created. You may resume a run - either to log more metadata or to fetch metadata from it. - - The run ends either when its `stop()` method is called or when the script finishes execution. - - You can also use the Run object as a context manager (see examples). - - Args: - project: Name of the project where the run should go, in the form `workspace-name/project_name`. - If left empty, the value of the NEPTUNE_PROJECT environment variable is used. - api_token: User's API token. - If left empty, the value of the NEPTUNE_API_TOKEN environment variable is used (recommended). - with_id: If you want to resume a run, pass the identifier of an existing run. For example, "SAN-1". - If left empty, a new run is created. - custom_run_id: A unique identifier to be used when running Neptune in distributed training jobs. - Make sure to use the same identifier throughout the whole pipeline execution. - mode: Connection mode in which the tracking will work. - If left empty, the value of the NEPTUNE_MODE environment variable is used. - If no value was set for the environment variable, "async" is used by default. - Possible values are `async`, `sync`, `offline`, `read-only`, and `debug`. - name: Custom name for the run. You can add it as a column in the runs table ("sys/name"). - You can also edit the name in the app: Open the run menu and access the run information. - description: Custom description of the run. You can add it as a column in the runs table - ("sys/description"). - You can also edit the description in the app: Open the run menu and access the run information. - tags: Tags of the run as a list of strings. - You can edit the tags through the "sys/tags" field or in the app (run menu -> information). - You can also select multiple runs and manage their tags as a single action. - source_files: List of source files to be uploaded. - Uploaded source files are displayed in the "Source code" dashboard. - To not upload anything, pass an empty list (`[]`). - Unix style pathname pattern expansion is supported. For example, you can pass `*.py` to upload - all Python files from the current directory. - If None is passed, the Python file from which the run was created will be uploaded. - capture_stdout: Whether to log the stdout of the run. - Defaults to `False` in interactive sessions and `True` otherwise. - The data is logged under the monitoring namespace (see the `monitoring_namespace` parameter). - capture_stderr: Whether to log the stderr of the run. - Defaults to `False` in interactive sessions and `True` otherwise. - The data is logged under the monitoring namespace (see the `monitoring_namespace` parameter). - capture_hardware_metrics: Whether to send hardware monitoring logs (CPU, GPU, and memory utilization). - Defaults to `False` in interactive sessions and `True` otherwise. - The data is logged under the monitoring namespace (see the `monitoring_namespace` parameter). - fail_on_exception: Whether to register an uncaught exception handler to this process and, - in case of an exception, set the "sys/failed" field of the run to `True`. - An exception is always logged. - monitoring_namespace: Namespace inside which all hardware monitoring logs are stored. 
- Defaults to "monitoring/", where the hash is generated based on environment information, - to ensure that it's unique for each process. - flush_period: In the asynchronous (default) connection mode, how often disk flushing is triggered - (in seconds). - proxies: Argument passed to HTTP calls made via the Requests library, as dictionary of strings. - For more information about proxies, see the Requests documentation. - capture_traceback: Whether to log the traceback of the run in case of an exception. - The tracked metadata is stored in the "/traceback" namespace (see the - `monitoring_namespace` parameter). - git_ref: GitRef object containing information about the Git repository path. - If None, Neptune looks for a repository in the path of the script that is executed. - To specify a different location, set to GitRef(repository_path="path/to/repo"). - To turn off Git tracking for the run, set to False or GitRef.DISABLED. - dependencies: If you pass `"infer"`, Neptune logs dependencies installed in the current environment. - You can also pass a path to your dependency file directly. - If left empty, no dependencies are tracked. - async_lag_callback: Custom callback which is called if the lag between a queued operation and its - synchronization with the server exceeds the duration defined by `async_lag_threshold`. The callback - should take a Run object as the argument and can contain any custom code, such as calling `stop()` on - the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_LAG_CALLBACK` environment variable to `TRUE`. - async_lag_threshold: In seconds, duration between the queueing and synchronization of an operation. - If a lag callback (default callback enabled via environment variable or custom callback passed to the - `async_lag_callback` argument) is enabled, the callback is called when this duration is exceeded. - async_no_progress_callback: Custom callback which is called if there has been no synchronization progress - whatsoever for the duration defined by `async_no_progress_threshold`. The callback - should take a Run object as the argument and can contain any custom code, such as calling `stop()` on - the object. - Note: Instead of using this argument, you can use Neptune's default callback by setting the - `NEPTUNE_ENABLE_DEFAULT_ASYNC_NO_PROGRESS_CALLBACK` environment variable to `TRUE`. - async_no_progress_threshold: In seconds, for how long there has been no synchronization progress since the - object was initialized. If a no-progress callback (default callback enabled via environment variable or - custom callback passed to the `async_no_progress_callback` argument) is enabled, the callback is called - when this duration is exceeded. - - Returns: - Run object that is used to manage the tracked run and log metadata to it. - - Examples: - - Creating a new run: - - >>> import neptune - - >>> # Minimal invoke - ... # (creates a run in the project specified by the NEPTUNE_PROJECT environment variable) - ... run = neptune.init_run() - - >>> # Or initialize with the constructor - ... run = Run(project="ml-team/classification") - - >>> # Create a run with a name and description, with no sources files or Git info tracked: - >>> run = neptune.init_run( - ... name="neural-net-mnist", - ... description="neural net trained on MNIST", - ... source_files=[], - ... git_ref=False, - ... ) - - >>> # Log all .py files from all subdirectories, excluding hidden files - ... 
run = neptune.init_run(source_files="**/*.py") - - >>> # Log all files and directories in the current working directory, excluding hidden files - ... run = neptune.init_run(source_files="*") - - >>> # Larger example - ... run = neptune.init_run( - ... project="ml-team/classification", - ... name="first-pytorch-ever", - ... description="Longer description of the run goes here", - ... tags=["tags", "go-here", "as-list-of-strings"], - ... source_files=["training_with_pytorch.py", "net.py"], - ... dependencies="infer", - ... capture_stderr=False, - ... git_ref=GitRef(repository_path="/Users/Jackie/repos/cls_project"), - ... ) - - Connecting to an existing run: - - >>> # Resume logging to an existing run with the ID "SAN-3" - ... run = neptune.init_run(with_id="SAN-3") - ... run["parameters/lr"] = 0.1 # modify or add metadata - - >>> # Initialize an existing run in read-only mode (logging new data is not possible, only fetching) - ... run = neptune.init_run(with_id="SAN-4", mode="read-only") - ... learning_rate = run["parameters/lr"].fetch() - - Using the Run object as context manager: - - >>> with Run() as run: - ... run["metric"].append(value) - - For more, see the docs: - Initializing a run: - https://docs.neptune.ai/api/neptune#init_run - Run class reference: - https://docs.neptune.ai/api/run/ - Essential logging methods: - https://docs.neptune.ai/logging/methods/ - Resuming a run: - https://docs.neptune.ai/logging/to_existing_object/ - Setting a custom run ID: - https://docs.neptune.ai/logging/custom_run_id/ - Logging to multiple runs at once: - https://docs.neptune.ai/logging/to_multiple_objects/ - Accessing the run from multiple places: - https://docs.neptune.ai/logging/from_multiple_places/ - """ - - container_type = ContainerType.RUN - - def __init__( - self, - with_id: Optional[str] = None, - *, - project: Optional[str] = None, - api_token: Optional[str] = None, - custom_run_id: Optional[str] = None, - mode: Optional[Literal["async", "sync", "offline", "read-only", "debug"]] = None, - name: Optional[str] = None, - description: Optional[str] = None, - tags: Optional[Union[List[str], str]] = None, - source_files: Optional[Union[List[str], str]] = None, - capture_stdout: Optional[bool] = None, - capture_stderr: Optional[bool] = None, - capture_hardware_metrics: Optional[bool] = None, - fail_on_exception: bool = True, - monitoring_namespace: Optional[str] = None, - flush_period: float = DEFAULT_FLUSH_PERIOD, - proxies: Optional[dict] = None, - capture_traceback: bool = True, - git_ref: Optional[Union[GitRef, GitRefDisabled, bool]] = None, - dependencies: Optional[Union[str, os.PathLike]] = None, - async_lag_callback: Optional[NeptuneObjectCallback] = None, - async_lag_threshold: float = ASYNC_LAG_THRESHOLD, - async_no_progress_callback: Optional[NeptuneObjectCallback] = None, - async_no_progress_threshold: float = ASYNC_NO_PROGRESS_THRESHOLD, - **kwargs, - ): - check_for_extra_kwargs("Run", kwargs) - - verify_type("with_id", with_id, (str, type(None))) - verify_type("project", project, (str, type(None))) - verify_type("custom_run_id", custom_run_id, (str, type(None))) - verify_type("mode", mode, (str, type(None))) - verify_type("name", name, (str, type(None))) - verify_type("description", description, (str, type(None))) - verify_type("capture_stdout", capture_stdout, (bool, type(None))) - verify_type("capture_stderr", capture_stderr, (bool, type(None))) - verify_type("capture_hardware_metrics", capture_hardware_metrics, (bool, type(None))) - verify_type("fail_on_exception", 
fail_on_exception, bool) - verify_type("monitoring_namespace", monitoring_namespace, (str, type(None))) - verify_type("capture_traceback", capture_traceback, bool) - verify_type("git_ref", git_ref, (GitRef, str, bool, type(None))) - verify_type("dependencies", dependencies, (str, os.PathLike, type(None))) - - if tags is not None: - if isinstance(tags, str): - tags = [tags] - else: - verify_collection_type("tags", tags, str) - if source_files is not None: - if isinstance(source_files, str): - source_files = [source_files] - else: - verify_collection_type("source_files", source_files, str) - - self._with_id: Optional[str] = with_id - self._name: Optional[str] = DEFAULT_NAME if with_id is None and name is None else name - self._description: Optional[str] = "" if with_id is None and description is None else description - self._custom_run_id: Optional[str] = custom_run_id or os.getenv(CUSTOM_RUN_ID_ENV_NAME) - self._hostname: str = get_hostname() - self._pid: int = os.getpid() - self._tid: int = threading.get_ident() - self._tags: Optional[List[str]] = tags - self._source_files: Optional[List[str]] = source_files - self._fail_on_exception: bool = fail_on_exception - self._capture_traceback: bool = capture_traceback - - if type(git_ref) is bool: - git_ref = GitRef() if git_ref else GitRef.DISABLED - - self._git_ref: Optional[Union[GitRef, GitRefDisabled]] = git_ref or GitRef() - self._dependencies: Optional[Union[str, os.PathLike]] = dependencies - - self._monitoring_namespace: str = ( - monitoring_namespace - or os.getenv(MONITORING_NAMESPACE) - or generate_monitoring_namespace(self._hostname, self._pid, self._tid) - ) - - # Resolve the connection mode: explicit argument, then the NEPTUNE_MODE environment variable, then "async" - mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.ASYNC.value) - - self._stdout_path: str = "{}/stdout".format(self._monitoring_namespace) - self._capture_stdout: bool = capture_stdout - if capture_stdout is None: - self._capture_stdout = capture_only_if_non_interactive(mode=mode) - - self._stderr_path: str = "{}/stderr".format(self._monitoring_namespace) - self._capture_stderr: bool = capture_stderr - if capture_stderr is None: - self._capture_stderr = capture_only_if_non_interactive(mode=mode) - - self._capture_hardware_metrics: bool = capture_hardware_metrics - if capture_hardware_metrics is None: - self._capture_hardware_metrics = capture_only_if_non_interactive(mode=mode) - - if with_id and custom_run_id: - raise NeptuneRunResumeAndCustomIdCollision() - - if mode == Mode.OFFLINE or mode == Mode.DEBUG: - project = OFFLINE_PROJECT_QUALIFIED_NAME - - super().__init__( - project=project, - api_token=api_token, - mode=mode, - flush_period=flush_period, - proxies=proxies, - async_lag_callback=async_lag_callback, - async_lag_threshold=async_lag_threshold, - async_no_progress_callback=async_no_progress_callback, - async_no_progress_threshold=async_no_progress_threshold, - ) - - def _get_or_create_api_object(self) -> ApiExperiment: - project_workspace = self._project_api_object.workspace - project_name = self._project_api_object.name - project_qualified_name = f"{project_workspace}/{project_name}" - - if self._with_id: - return self._backend.get_metadata_container( - container_id=QualifiedName(project_qualified_name + "/" + self._with_id), - expected_container_type=Run.container_type, - ) - else: - if self._mode == Mode.READ_ONLY: - raise NeedExistingRunForReadOnlyMode() - - git_info = to_git_info(git_ref=self._git_ref) - - custom_run_id = self._custom_run_id - if custom_run_id_exceeds_length(self._custom_run_id): - custom_run_id = None - -
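For reference, the monitoring namespace above is resolved in three steps: the explicit argument wins, then the environment variable referenced by `neptune.envs.MONITORING_NAMESPACE`, then a per-process hash of hostname, PID, and thread ID. A sketch of the equivalent logic, using the names from the deleted module:

    monitoring_namespace = (
        monitoring_namespace_argument          # monitoring_namespace=... passed to Run()
        or os.getenv(MONITORING_NAMESPACE)     # environment variable override
        or generate_monitoring_namespace(hostname, pid, tid)  # "monitoring/<8-char hash>"
    )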
notebook_id, checkpoint_id = create_notebook_checkpoint(backend=self._backend) - - return self._backend.create_run( - project_id=self._project_api_object.id, - git_info=git_info, - custom_run_id=custom_run_id, - notebook_id=notebook_id, - checkpoint_id=checkpoint_id, - ) - - @temporarily_disabled - def _get_background_jobs(self) -> List["BackgroundJob"]: - background_jobs = [PingBackgroundJob()] - - websockets_factory = self._backend.websockets_factory(self._project_api_object.id, self._id) - if websockets_factory: - background_jobs.append(WebsocketSignalsBackgroundJob(websockets_factory)) - - if self._capture_stdout: - background_jobs.append(StdoutCaptureBackgroundJob(attribute_name=self._stdout_path)) - - if self._capture_stderr: - background_jobs.append(StderrCaptureBackgroundJob(attribute_name=self._stderr_path)) - - if self._capture_hardware_metrics: - background_jobs.append(HardwareMetricReportingJob(attribute_namespace=self._monitoring_namespace)) - - if self._capture_traceback: - background_jobs.append( - TracebackJob(path=f"{self._monitoring_namespace}/traceback", fail_on_exception=self._fail_on_exception) - ) - - return background_jobs - - @temporarily_disabled - def _write_initial_monitoring_attributes(self) -> None: - if self._hostname is not None: - self[f"{self._monitoring_namespace}/hostname"] = self._hostname - if self._with_id is None: - self[SYSTEM_HOSTNAME_ATTRIBUTE_PATH] = self._hostname - - if self._pid is not None: - self[f"{self._monitoring_namespace}/pid"] = str(self._pid) - - if self._tid is not None: - self[f"{self._monitoring_namespace}/tid"] = str(self._tid) - - @temporarily_disabled - def _write_initial_attributes(self): - if self._name is not None: - self[SYSTEM_NAME_ATTRIBUTE_PATH] = self._name - - if self._description is not None: - self[SYSTEM_DESCRIPTION_ATTRIBUTE_PATH] = self._description - - if any((self._capture_stderr, self._capture_stdout, self._capture_traceback, self._capture_hardware_metrics)): - self._write_initial_monitoring_attributes() - - if self._tags is not None: - self[SYSTEM_TAGS_ATTRIBUTE_PATH].add(self._tags) - - if self._with_id is None: - self[SYSTEM_FAILED_ATTRIBUTE_PATH] = False - - if self._capture_stdout and not self.exists(self._stdout_path): - self.define(self._stdout_path, StringSeries([])) - - if self._capture_stderr and not self.exists(self._stderr_path): - self.define(self._stderr_path, StringSeries([])) - - if self._with_id is None or self._source_files is not None: - # upload default sources only when creating a new run; explicitly passed source_files are uploaded even on resume - upload_source_code(source_files=self._source_files, run=self) - - if self._dependencies: - try: - if self._dependencies == "infer": - dependency_strategy = InferDependenciesStrategy() - else: - dependency_strategy = FileDependenciesStrategy(path=self._dependencies) - - dependency_strategy.log_dependencies(run=self) - except Exception as e: - warn_once( - "An exception occurred in automatic dependency tracking. " - "Skipping upload of requirement files. " - "Exception: " + str(e), - exception=NeptuneWarning, - ) - - try: - track_uncommitted_changes( - git_ref=self._git_ref, - run=self, - ) - except Exception as e: - warn_once( - "An exception occurred in tracking uncommitted changes. " - "Skipping upload of patch files. "
- "Exception: " + str(e), - exception=NeptuneWarning, - ) - - @property - def monitoring_namespace(self) -> str: - return self._monitoring_namespace - - def _raise_if_stopped(self): - if self._state == ContainerState.STOPPED: - raise InactiveRunException(label=self._sys_id) - - def get_url(self) -> str: - """Returns the URL that can be accessed within the browser""" - return self._backend.get_run_url( - run_id=self._id, - workspace=self._workspace, - project_name=self._project_name, - sys_id=self._sys_id, - ) - - -def capture_only_if_non_interactive(mode) -> bool: - if in_interactive() or in_notebook(): - if mode in {Mode.OFFLINE, Mode.SYNC, Mode.ASYNC}: - warn_once( - "The following monitoring options are disabled by default in interactive sessions:" - " 'capture_stdout', 'capture_stderr', 'capture_traceback', and 'capture_hardware_metrics'." - " To enable them, set each parameter to 'True' when initializing the run. The monitoring will" - " continue until you call run.stop() or the kernel stops." - " Also note: Your source files can only be tracked if you pass the path(s) to the 'source_code'" - " argument. For help, see the Neptune docs: https://docs.neptune.ai/logging/source_code/", - exception=NeptuneWarning, - ) - return False - return True - - -def generate_monitoring_namespace(*descriptors) -> str: - return f"monitoring/{generate_hash(*descriptors, length=8)}" - - -def check_for_extra_kwargs(caller_name: str, kwargs: dict): - if kwargs: - first_key = next(iter(kwargs.keys())) - raise TypeError(f"{caller_name}() got an unexpected keyword argument '{first_key}'") - - -def create_notebook_checkpoint(backend: NeptuneBackend) -> Tuple[Optional[str], Optional[str]]: - notebook_id = os.getenv(NEPTUNE_NOTEBOOK_ID, None) - notebook_path = os.getenv(NEPTUNE_NOTEBOOK_PATH, None) - - checkpoint_id = None - if notebook_id is not None and notebook_path is not None: - checkpoint_id = create_checkpoint(backend=backend, notebook_id=notebook_id, notebook_path=notebook_path) - - return notebook_id, checkpoint_id diff --git a/src/neptune/objects/structure_version.py b/src/neptune/objects/structure_version.py deleted file mode 100644 index c576a4d94..000000000 --- a/src/neptune/objects/structure_version.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["StructureVersion"] - -from enum import Enum - - -class StructureVersion(Enum): - # ------------------------------------------------- - # .neptune/ - # async/ - # / - # exec-/ - # container_type - # data-1.log - # ... - # ------------------------------------------------- - LEGACY = 1 - - # ------------------------------------------------- - # .neptune/ - # async/ - # run__/ - # exec---/ - # data-1.log - # ... - # ------------------------------------------------- - CHILD_EXECUTION_DIRECTORIES = 2 - - # ------------------------------------------------- - # .neptune/ - # async/ - # run______/ - # data-1.log - # ... 
- # ------------------------------------------------- - DIRECT_DIRECTORY = 3 diff --git a/src/neptune/objects/utils.py b/src/neptune/objects/utils.py deleted file mode 100644 index 73763e746..000000000 --- a/src/neptune/objects/utils.py +++ /dev/null @@ -1,142 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = [ - "prepare_nql_query", -] - -from typing import ( - Iterable, - List, - Optional, - Union, -) - -from neptune.internal.backends.nql import ( - NQLAggregator, - NQLAttributeOperator, - NQLAttributeType, - NQLQuery, - NQLQueryAggregate, - NQLQueryAttribute, - RawNQLQuery, -) -from neptune.internal.utils.run_state import RunState - - -def prepare_nql_query( - ids: Optional[Iterable[str]], - states: Optional[Iterable[str]], - owners: Optional[Iterable[str]], - tags: Optional[Iterable[str]], - trashed: Optional[bool], -) -> NQLQueryAggregate: - query_items: List[Union[NQLQueryAttribute, NQLQueryAggregate]] = [] - - if trashed is not None: - query_items.append( - NQLQueryAttribute( - name="sys/trashed", - type=NQLAttributeType.BOOLEAN, - operator=NQLAttributeOperator.EQUALS, - value=trashed, - ) - ) - - if ids: - query_items.append( - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/id", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value=api_id, - ) - for api_id in ids - ], - aggregator=NQLAggregator.OR, - ) - ) - - if states: - query_items.append( - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/state", - type=NQLAttributeType.EXPERIMENT_STATE, - operator=NQLAttributeOperator.EQUALS, - value=RunState.from_string(state).to_api(), - ) - for state in states - ], - aggregator=NQLAggregator.OR, - ) - ) - - if owners: - query_items.append( - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value=owner, - ) - for owner in owners - ], - aggregator=NQLAggregator.OR, - ) - ) - - if tags: - query_items.append( - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/tags", - type=NQLAttributeType.STRING_SET, - operator=NQLAttributeOperator.CONTAINS, - value=tag, - ) - for tag in tags - ], - aggregator=NQLAggregator.AND, - ) - ) - - query = NQLQueryAggregate(items=query_items, aggregator=NQLAggregator.AND) - return query - - -def build_raw_query(query: str, trashed: Optional[bool]) -> NQLQuery: - raw_nql = RawNQLQuery(query) - - if trashed is None: - return raw_nql - - nql = NQLQueryAggregate( - items=[ - raw_nql, - NQLQueryAttribute( - name="sys/trashed", type=NQLAttributeType.BOOLEAN, operator=NQLAttributeOperator.EQUALS, value=trashed - ), - ], - aggregator=NQLAggregator.AND, - ) - return nql diff --git a/src/neptune/table.py b/src/neptune/table.py deleted file mode 100644 index ac4a75c2e..000000000 --- a/src/neptune/table.py +++ /dev/null @@ -1,172 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
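A sketch of how the deleted NQL helpers compose a query: filters within `ids`, `states`, and `owners` are OR-ed, the tag filter requires all tags (AND), and all filter groups are AND-ed together. Names as in the module above; the filter values are illustrative:

    from neptune.objects.utils import prepare_nql_query

    nql = prepare_nql_query(
        ids=None,
        states=["inactive"],
        owners=["my_company_ci_service"],
        tags=["Exploration", "Optuna"],
        trashed=False,
    )
    # Roughly: (sys/trashed == False)
    #          AND (sys/state == <api value of "inactive">)
    #          AND (sys/owner == "my_company_ci_service")
    #          AND (sys/tags CONTAINS "Exploration" AND sys/tags CONTAINS "Optuna")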
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Table"] - -from typing import ( - TYPE_CHECKING, - Any, - Generator, - List, - Optional, -) - -from neptune.api.field_visitor import FieldToValueVisitor -from neptune.api.models import ( - Field, - FieldType, - LeaderboardEntry, -) -from neptune.exceptions import MetadataInconsistency -from neptune.integrations.pandas import to_pandas -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.container_type import ContainerType -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import ( - join_paths, - parse_path, -) -from neptune.typing import ProgressBarType - -if TYPE_CHECKING: - import pandas - - -logger = get_logger() - - -class TableEntry: - def __init__( - self, - backend: NeptuneBackend, - container_type: ContainerType, - _id: str, - attributes: List[Field], - ): - self._backend = backend - self._container_type = container_type - self._id = _id - self._fields = attributes - self._field_to_value_visitor = FieldToValueVisitor() - - def __getitem__(self, path: str) -> "LeaderboardHandler": - return LeaderboardHandler(table_entry=self, path=path) - - def get_attribute_type(self, path: str) -> FieldType: - for field in self._fields: - if field.path == path: - return field.type - - raise ValueError(f"Could not find {path} field") - - def get_attribute_value(self, path: str) -> Any: - for field in self._fields: - if field.path == path: - return self._field_to_value_visitor.visit(field) - raise ValueError("Could not find {} attribute".format(path)) - - def download_file_attribute( - self, - path: str, - destination: Optional[str], - progress_bar: Optional[ProgressBarType] = None, - ) -> None: - for attr in self._fields: - if attr.path == path: - _type = attr.type - if _type == FieldType.FILE: - self._backend.download_file( - container_id=self._id, - container_type=self._container_type, - path=parse_path(path), - destination=destination, - progress_bar=progress_bar, - ) - return - raise MetadataInconsistency("Cannot download file from attribute of type {}".format(_type)) - raise ValueError("Could not find {} attribute".format(path)) - - def download_file_set_attribute( - self, - path: str, - destination: Optional[str], - progress_bar: Optional[ProgressBarType] = None, - ) -> None: - for attr in self._fields: - if attr.path == path: - _type = attr.type - if _type == FieldType.FILE_SET: - self._backend.download_file_set( - container_id=self._id, - container_type=self._container_type, - path=parse_path(path), - destination=destination, - progress_bar=progress_bar, - ) - return - raise MetadataInconsistency("Cannot download ZIP archive from attribute of type {}".format(_type)) - raise ValueError("Could not find {} attribute".format(path)) - - -class LeaderboardHandler: - def __init__(self, table_entry: TableEntry, path: str) -> None: - self._table_entry = table_entry - self._path = path - - def __getitem__(self, path: str) -> "LeaderboardHandler": - return 
LeaderboardHandler(table_entry=self._table_entry, path=join_paths(self._path, path)) - - def get(self) -> Any: - return self._table_entry.get_attribute_value(path=self._path) - - def download(self, destination: Optional[str]) -> None: - attr_type = self._table_entry.get_attribute_type(self._path) - if attr_type == FieldType.FILE: - return self._table_entry.download_file_attribute(self._path, destination) - elif attr_type == FieldType.FILE_SET: - return self._table_entry.download_file_set_attribute(path=self._path, destination=destination) - raise MetadataInconsistency("Cannot download file from attribute of type {}".format(attr_type)) - - -class Table: - def __init__( - self, - backend: NeptuneBackend, - container_type: ContainerType, - entries: Generator[LeaderboardEntry, None, None], - ) -> None: - self._backend = backend - self._entries = entries - self._container_type = container_type - self._iterator = iter(entries if entries else ()) - - def to_rows(self) -> List[TableEntry]: - return list(self) - - def __iter__(self) -> "Table": - return self - - def __next__(self) -> TableEntry: - entry = next(self._iterator) - - return TableEntry( - backend=self._backend, - container_type=self._container_type, - _id=entry.object_id, - attributes=entry.fields, - ) - - def to_pandas(self) -> "pandas.DataFrame": - return to_pandas(self) diff --git a/src/neptune/types/__init__.py b/src/neptune/types/__init__.py deleted file mode 100644 index fffa362cb..000000000 --- a/src/neptune/types/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "Artifact", - "Boolean", - "Datetime", - "File", - "Float", - "GitRef", - "Integer", - "String", - "FileSet", - "StringSet", - "FileSeries", - "FloatSeries", - "StringSeries", -] - - -from .atoms import ( - Artifact, - Boolean, - Datetime, - File, - Float, - GitRef, - Integer, - String, -) -from .file_set import FileSet -from .series import ( - FileSeries, - FloatSeries, - StringSeries, -) -from .sets import StringSet diff --git a/src/neptune/types/atoms/__init__.py b/src/neptune/types/atoms/__init__.py deleted file mode 100644 index ef73e5c8c..000000000 --- a/src/neptune/types/atoms/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
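Usage sketch for the deleted `Table` API above: a `Table` is a one-shot iterator over `LeaderboardEntry` results, wrapping each entry in a `TableEntry` on demand (the `project` object is assumed):

    table = project.fetch_runs_table(limit=5)
    for entry in table:                # Table.__next__ wraps entries lazily
        lr = entry["params/lr"].get()  # __getitem__ returns a LeaderboardHandler
    # Alternatively, materialize a table that has not been consumed yet:
    rows = table.to_rows()     # list of TableEntry objects
    df = table.to_pandas()     # delegates to neptune.integrations.pandas.to_pandas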
-# -__all__ = ["Artifact", "Boolean", "Datetime", "File", "Float", "GitRef", "Integer", "String"] - -from .artifact import Artifact -from .boolean import Boolean -from .datetime import Datetime -from .file import File -from .float import Float -from .git_ref import GitRef -from .integer import Integer -from .string import String diff --git a/src/neptune/types/atoms/artifact.py b/src/neptune/types/atoms/artifact.py deleted file mode 100644 index 893bc715d..000000000 --- a/src/neptune/types/atoms/artifact.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Artifact"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - Optional, - TypeVar, - Union, -) - -from neptune.internal.artifacts.file_hasher import FileHasher -from neptune.internal.types.stringify_value import ( - StringifyValue, - extract_if_stringify_value, -) -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class Artifact(Atom): - - hash: str - - def __init__(self, value: Union[Optional[str], StringifyValue] = None): - value = extract_if_stringify_value(value) - - self.hash = str(value) - assert ( - len(self.hash) == FileHasher.HASH_LENGTH or value is None - ), "Expected sha-256 string. E.g. 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'" - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_artifact(self) - - def __str__(self): - return "Artifact({})".format(self.hash) diff --git a/src/neptune/types/atoms/atom.py b/src/neptune/types/atoms/atom.py deleted file mode 100644 index 7c0e4771a..000000000 --- a/src/neptune/types/atoms/atom.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Atom"] - -import abc -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class Atom(Value): - @abc.abstractmethod - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - pass diff --git a/src/neptune/types/atoms/boolean.py b/src/neptune/types/atoms/boolean.py deleted file mode 100644 index 9af0d6481..000000000 --- a/src/neptune/types/atoms/boolean.py +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Boolean"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.internal.types.stringify_value import extract_if_stringify_value -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class Boolean(Atom): - - value: bool - - def __init__(self, value): - self.value = bool(extract_if_stringify_value(value)) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_boolean(self) - - def __str__(self): - return "Boolean({})".format(str(self.value)) - - def __bool__(self): - return self.value diff --git a/src/neptune/types/atoms/datetime.py b/src/neptune/types/atoms/datetime.py deleted file mode 100644 index 5944cf42f..000000000 --- a/src/neptune/types/atoms/datetime.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Datetime"] - -from dataclasses import dataclass -from datetime import datetime -from typing import ( - TYPE_CHECKING, - TypeVar, - Union, -) - -from neptune.internal.types.stringify_value import ( - StringifyValue, - extract_if_stringify_value, -) -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class Datetime(Atom): - value: datetime - - def __init__(self, value: Union[datetime, StringifyValue]): - value = extract_if_stringify_value(value) - self.value = value.replace(microsecond=1000 * int(value.microsecond / 1000)) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_datetime(self) - - def __str__(self): - return "Datetime({})".format(str(self.value)) diff --git a/src/neptune/types/atoms/file.py b/src/neptune/types/atoms/file.py deleted file mode 100644 index 8f2299cb4..000000000 --- a/src/neptune/types/atoms/file.py +++ /dev/null @@ -1,325 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
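Worth noting in the `Datetime` atom above: `__init__` truncates the wrapped value to whole milliseconds. A sketch:

    from datetime import datetime
    from neptune.types import Datetime

    dt = Datetime(datetime(2024, 11, 27, 20, 56, 25, 123_456))
    dt.value.microsecond  # 123000 -- microseconds rounded down to milliseconds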
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "File", -] - -from io import IOBase -from typing import ( - TYPE_CHECKING, - Optional, - TypeVar, - Union, -) - -from neptune.internal.types.file_types import ( - FileComposite, - InMemoryComposite, - LocalFileComposite, - StreamComposite, -) -from neptune.internal.utils import verify_type -from neptune.internal.utils.images import ( - get_html_content, - get_image_content, - get_pickle_content, - is_altair_chart, - is_bokeh_figure, - is_matplotlib_figure, - is_numpy_array, - is_pandas_dataframe, - is_pil_image, - is_plotly_figure, - is_seaborn_figure, -) -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class File(Atom): - def __init__(self, path: Optional[str] = None, file_composite: Optional[FileComposite] = None): - """We have to support the `path` parameter, since almost all `File` usages by our users look like `File(path)`.""" - verify_type("path", path, (str, type(None))) - verify_type("file_composite", file_composite, (FileComposite, type(None))) - - if path is not None and file_composite is not None: - raise ValueError("path and file_composite are mutually exclusive") - if path is None and file_composite is None: - raise ValueError("path or file_composite is required") - if path is not None: - self._file_composite = LocalFileComposite(path) - else: - self._file_composite = file_composite - - @property - def extension(self): - return self._file_composite.extension - - @property - def file_type(self): - return self._file_composite.file_type - - @property - def path(self): - return self._file_composite.path - - @property - def content(self): - return self._file_composite.content - - def _save(self, path): - self._file_composite.save(path) - - def __str__(self): - return str(self._file_composite) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_file(self) - - @staticmethod - def from_path(path: str, *, extension: Optional[str] = None) -> "File": - """Creates a File value object from a given path. - - Equivalent to `File(path)`, but you can specify the extension separately. - - Args: - path: Path of the file to be stored in the File value object. - extension (optional): Extension of the file, if not included in the path argument. - - Returns: - `File` value object created based on the path. - - For more, see the documentation: https://docs.neptune.ai/api/field_types#from_path - """ - verify_type("path", path, str) - verify_type("extension", extension, (str, type(None))) - - file_composite = LocalFileComposite(path, extension) - return File(file_composite=file_composite) - - @staticmethod - def from_content(content: Union[str, bytes], *, extension: Optional[str] = None) -> "File": - """Factory method for creating File value objects directly from binary and text content. - - In the case of text content, UTF-8 encoding will be used. - - Args: - content (str or bytes): Text or binary content to be stored in the `File` value object. - extension (str, optional, default is None): Extension of the created file.
- It will be used for interpreting the type of content for visualization. - If `None`, it will be 'bin' for binary content and 'txt' for text content. - Defaults to `None`. - - Returns: - ``File``: value object created from the content - - You may also want to check `from_content docs page`_. - - .. _from_content docs page: - https://docs.neptune.ai/api/field_types#from_content - """ - verify_type("content", content, (bytes, str, type(None))) - verify_type("extension", extension, (str, type(None))) - - file_composite = InMemoryComposite(content, extension) - return File(file_composite=file_composite) - - @staticmethod - def from_stream(stream: IOBase, *, seek: Optional[int] = 0, extension: Optional[str] = None) -> "File": - """Factory method for creating File value objects directly from binary and text streams. - - Note that you can only log content from the same stream once. - In the case of text streams, UTF-8 encoding will be used. - - Args: - stream (IOBase): Stream to be converted. - seek (optional): Change the stream position to the given byte offset. For details, - see the IOBase documentation. - extension (optional): Extension of the created file that will be used for interpreting the type - of content for visualization. - If None (default), it will be 'bin' for binary streams and 'txt' for text streams. - - Returns: - `File` value object created from the stream. - - See also: - - from_stream() documentation: https://docs.neptune.ai/api/field_types#from_stream - - IOBase documentation: https://docs.python.org/3/library/io.html#io.IOBase - """ - verify_type("stream", stream, (IOBase, type(None))) - verify_type("seek", seek, (int, type(None))) - verify_type("extension", extension, (str, type(None))) - - file_composite = StreamComposite(stream, seek, extension) - return File(file_composite=file_composite) - - @staticmethod - def as_image(image, autoscale: bool = True) -> "File": - """Static method for converting image objects or image-like objects to an image File value object. - - This way you can upload Matplotlib figures, Seaborn figures, PIL images, and NumPy arrays as static images. - - Args: - image: Image-like object to be converted. - The input image's pixel values must be either in the range [0.0, 1.0] (float) or [0, 255] (integer). - Supported are PyTorch tensors, TensorFlow/Keras tensors, NumPy arrays, PIL images, - Matplotlib figures, and Seaborn figures. - autoscale: Whether Neptune should try to detect the pixel range automatically - and scale it to an acceptable format. - - Returns: - `File`: value object with converted image - - Examples: - >>> import neptune - >>> from neptune.types import File - >>> run = neptune.init_run() - - Convert NumPy array to File value object and upload it - - >>> run["train/prediction_example"].upload(File.as_image(numpy_array)) - - Convert PIL image to File value object and upload it - - >>> pil_file = File.as_image(pil_image) - >>> run["dataset/data_sample/img1"].upload(pil_file) - - You can upload PIL images without explicit conversion - - >>> run["dataset/data_sample/img2"].upload(pil_image) - - See also the docs: - - How to log images: https://docs.neptune.ai/logging/images/ - - API reference: https://docs.neptune.ai/api/field_types#as_image - """ - content_bytes = get_image_content(image, autoscale=autoscale) - return File.from_content(content_bytes if content_bytes is not None else b"", extension="png") - - @staticmethod - def as_html(chart) -> "File": - """Converts an object to an HTML File value object.
- - This way you can upload `Altair`, `Bokeh`, `Plotly`, `Matplotlib`, `Seaborn` interactive charts, - or directly upload `Pandas` `DataFrame` objects, and explore them in the Neptune UI. - - Args: - chart: An object to be converted. - Supported are `Altair`, `Bokeh`, `Plotly`, `Matplotlib`, `Seaborn` interactive charts, - and `Pandas` `DataFrame` objects. - - Returns: - ``File``: value object with converted object. - - Examples: - >>> import neptune - >>> from neptune.types import File - >>> run = neptune.init_run() - - Convert Pandas DataFrame to File value object and upload it - - >>> run["train/results"].upload(File.as_html(df_predictions)) - - Convert Altair interactive chart to File value object and upload it - - >>> altair_file = File.as_html(altair_chart) - >>> run["dataset/data_sample/img1"].upload(altair_file) - - You can upload an Altair interactive chart without explicit conversion - - >>> run["dataset/data_sample/img2"].upload(altair_chart) - - You may also want to check `as_html docs page`_. - - .. _as_html docs page: - https://docs.neptune.ai/api/field_types#as_html - """ - content = get_html_content(chart) - return File.from_content(content if content is not None else "", extension="html") - - @staticmethod - def as_pickle(obj) -> "File": - """Pickles a Python object and stores it in a `File` value object. - - This way you can upload any Python object for future use. - - Args: - obj: An object to be pickled. - Any picklable Python object is supported. - - Returns: - ``File``: value object with pickled object. - - Examples: - >>> import neptune - >>> from neptune.types import File - >>> run = neptune.init_run() - - Pickle a model object and upload it - - >>> run["results/pickled_model"].upload(File.as_pickle(trained_model)) - - You may also want to check `as_pickle docs page`_. - - .. _as_pickle docs page: - https://docs.neptune.ai/api/field_types#as_pickle - """ - content = get_pickle_content(obj) - return File.from_content(content if content is not None else b"", extension="pkl") - - @staticmethod - def create_from(value) -> "File": - if isinstance(value, str): - return File(path=value) - elif File.is_convertable_to_image(value): - return File.as_image(value) - elif File.is_convertable_to_html(value): - return File.as_html(value) - elif is_numpy_array(value): - raise TypeError("Value of type {} is not supported. Please use File.as_image().".format(type(value))) - elif is_pandas_dataframe(value): - raise TypeError("Value of type {} is not supported.
Please use File.as_html().".format(type(value))) - elif isinstance(value, File): - return value - raise TypeError("Value of type {} is not supported.".format(type(value))) - - @staticmethod - def is_convertable(value): - return ( - is_pil_image(value) - or is_matplotlib_figure(value) - or is_plotly_figure(value) - or is_altair_chart(value) - or is_bokeh_figure(value) - or is_numpy_array(value) - or is_pandas_dataframe(value) - or is_seaborn_figure(value) - or isinstance(value, File) - ) - - @staticmethod - def is_convertable_to_image(value): - convertable_to_img_predicates = (is_pil_image, is_matplotlib_figure, is_seaborn_figure) - return any(predicate(value) for predicate in convertable_to_img_predicates) - - @staticmethod - def is_convertable_to_html(value): - convertable_to_html_predicates = (is_altair_chart, is_bokeh_figure, is_plotly_figure, is_seaborn_figure) - return any(predicate(value) for predicate in convertable_to_html_predicates) diff --git a/src/neptune/types/atoms/float.py b/src/neptune/types/atoms/float.py deleted file mode 100644 index 20adedd44..000000000 --- a/src/neptune/types/atoms/float.py +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Float"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.internal.types.stringify_value import extract_if_stringify_value -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class Float(Atom): - - value: float - - def __init__(self, value): - self.value = float(extract_if_stringify_value(value)) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_float(self) - - def __str__(self): - return "Float({})".format(str(self.value)) - - def __float__(self): - return self.value diff --git a/src/neptune/types/atoms/git_ref.py b/src/neptune/types/atoms/git_ref.py deleted file mode 100644 index 41df24a33..000000000 --- a/src/neptune/types/atoms/git_ref.py +++ /dev/null @@ -1,64 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
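Dispatch summary for the deleted `File.create_from` above (the values are illustrative; the order of checks matters, since e.g. Matplotlib figures are both image- and HTML-convertible, and the image branch wins):

    File.create_from("model.pt")         # str -> File(path=...)
    File.create_from(pil_image)          # image-like -> File.as_image(...)
    File.create_from(altair_chart)       # chart-like -> File.as_html(...)
    File.create_from(numpy_array)        # TypeError: use File.as_image() instead
    File.create_from(pandas_dataframe)   # TypeError: use File.as_html() instead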
-# -__all__ = ["GitRef", "GitRefDisabled"] - -from dataclasses import dataclass -from pathlib import Path -from typing import ( - TYPE_CHECKING, - NewType, - Optional, - TypeVar, - Union, -) - -from neptune.types.atoms.atom import Atom -from neptune.vendor.lib_programname import get_path_executed_script - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") -GitRefDisabled = NewType("GitRefDisabled", str) - - -class WithDisabledMixin: - DISABLED: GitRefDisabled = GitRefDisabled("DO_NOT_TRACK_GIT_REPOSITORY") - """Constant that can be used to disable Git repository tracking.""" - - -@dataclass -class GitRef(Atom, WithDisabledMixin): - """ - Represents Git repository metadata. - - Args: - repository_path: Path to the repository. If not provided, - the path to the script that is currently executed is used. - """ - - repository_path: Optional[Union[str, Path]] = get_path_executed_script() - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_git_ref(self) - - def __str__(self) -> str: - return f"GitRef({self.repository_path})" - - def resolve_path(self) -> Optional[Path]: - if self.repository_path is None: - return None - return Path(self.repository_path).resolve() diff --git a/src/neptune/types/atoms/integer.py b/src/neptune/types/atoms/integer.py deleted file mode 100644 index aacebd351..000000000 --- a/src/neptune/types/atoms/integer.py +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Integer"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.internal.types.stringify_value import extract_if_stringify_value -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class Integer(Atom): - - value: int - - def __init__(self, value): - self.value = int(extract_if_stringify_value(value)) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_integer(self) - - def __str__(self): - return "Integer({})".format(str(self.value)) - - def __int__(self): - return self.value diff --git a/src/neptune/types/atoms/string.py b/src/neptune/types/atoms/string.py deleted file mode 100644 index 0ab6a29b4..000000000 --- a/src/neptune/types/atoms/string.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["String"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - Optional, - TypeVar, - Union, -) - -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import ( - is_stringify_value, - verify_type, -) -from neptune.types.atoms.atom import Atom - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class String(Atom): - - value: str - - def __init__(self, value: Optional[Union[str, StringifyValue]]): - verify_type("value", value, (str, type(None), StringifyValue)) - - self.value = str(value.value) if is_stringify_value(value) else value - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_string(self) - - def __str__(self) -> str: - return "String({})".format(str(self.value)) diff --git a/src/neptune/types/file_set.py b/src/neptune/types/file_set.py deleted file mode 100644 index c8ff1fa02..000000000 --- a/src/neptune/types/file_set.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "FileSet", -] - -from typing import ( - TYPE_CHECKING, - Iterable, - List, - TypeVar, - Union, -) - -from neptune.internal.utils import ( - verify_collection_type, - verify_type, -) -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class FileSet(Value): - def __init__(self, file_globs: Union[str, Iterable[str]]): - verify_type("file_globs", file_globs, (str, Iterable)) - if isinstance(file_globs, str): - file_globs = [file_globs] - else: - verify_collection_type("file_globs", file_globs, str) - self.file_globs: List[str] = list(file_globs) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_file_set(self) - - def __str__(self): - return "FileSet({})".format(str(self.file_globs)) diff --git a/src/neptune/types/mode.py b/src/neptune/types/mode.py deleted file mode 100644 index 0b3ffd477..000000000 --- a/src/neptune/types/mode.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["Mode"] - -from enum import Enum - - -class Mode(str, Enum): - OFFLINE = "offline" - DEBUG = "debug" - ASYNC = "async" - SYNC = "sync" - READ_ONLY = "read-only" - - def __repr__(self): - return f'"{self.value}"' diff --git a/src/neptune/types/model_version_stage.py b/src/neptune/types/model_version_stage.py deleted file mode 100644 index 65bc3e144..000000000 --- a/src/neptune/types/model_version_stage.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ModelVersionStage"] - -import enum - - -class ModelVersionStage(enum.Enum): - NONE = "none" - STAGING = "staging" - PRODUCTION = "production" - ARCHIVED = "archived" diff --git a/src/neptune/types/namespace.py b/src/neptune/types/namespace.py deleted file mode 100644 index e03ae8ae5..000000000 --- a/src/neptune/types/namespace.py +++ /dev/null @@ -1,56 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Namespace"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.internal.utils.logger import get_logger -from neptune.internal.utils.paths import parse_path -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -logger = get_logger() -Ret = TypeVar("Ret") - - -@dataclass -class Namespace(Value): - - value: dict - - def __init__(self, value): - self.value = value - empty_keys = [k for k in self.value.keys() if not parse_path(k)] - if empty_keys: - all_keys = ", ".join(['"' + k + '"' for k in empty_keys]) - logger.warning( - f"Key(s) {all_keys} can't be used in Namespaces and dicts stored in Neptune. Please use non-empty " - f"keys instead. The value(s) will be dropped.", - ) - self.value = value.copy() - [self.value.pop(key) for key in empty_keys] - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_namespace(self) - - def __str__(self): - return "Namespace({})".format(str(self.value)) diff --git a/src/neptune/types/series/__init__.py b/src/neptune/types/series/__init__.py deleted file mode 100644 index 737893f5e..000000000 --- a/src/neptune/types/series/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
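[Editor's note] Namespace above validates keys eagerly: any key that parses to an empty path is dropped with a warning rather than raising. A minimal sketch with hypothetical values:

from neptune.types.namespace import Namespace

# parse_path("") yields no path components, so the empty key is removed
# (with a logged warning); valid keys, including nested ones, are kept.
ns = Namespace({"lr": 0.01, "": "dropped", "model/name": "resnet"})
print(ns)  # Namespace({'lr': 0.01, 'model/name': 'resnet'})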
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -__all__ = [ - "FileSeries", - "FloatSeries", - "StringSeries", -] - -from .file_series import FileSeries -from .float_series import FloatSeries -from .string_series import StringSeries diff --git a/src/neptune/types/series/file_series.py b/src/neptune/types/series/file_series.py deleted file mode 100644 index 41bb4fc48..000000000 --- a/src/neptune/types/series/file_series.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["FileSeries"] - -import time -from itertools import cycle -from typing import ( - TYPE_CHECKING, - List, - Optional, - Sequence, - TypeVar, -) - -from neptune.internal.types.stringify_value import extract_if_stringify_value -from neptune.internal.utils import is_collection -from neptune.internal.utils.logger import get_logger -from neptune.types import File -from neptune.types.series.series import Series - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -logger = get_logger() -Ret = TypeVar("Ret") - - -class FileSeries(Series): - def __init__( - self, values, timestamps: Optional[Sequence[float]] = None, steps: Optional[Sequence[float]] = None, **kwargs - ): - values = extract_if_stringify_value(values) - - if not is_collection(values): - raise TypeError("`values` is not a collection") - - self._values = [File.create_from(value) for value in values] - self.name = kwargs.pop("name", None) - self.description = kwargs.pop("description", None) - if kwargs: - logger.error("Warning: unexpected arguments (%s) in FileSeries", kwargs) - - if steps is None: - self._steps = cycle([None]) - else: - assert len(values) == len(steps) - self._steps = steps - - if timestamps is None: - self._timestamps = cycle([time.time()]) - else: - assert len(values) == len(timestamps) - self._timestamps = timestamps - - @property - def steps(self): - return self._steps - - @property - def timestamps(self): - return self._timestamps - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_image_series(self) - - @property - def values(self) -> List[File]: - return self._values - - def __str__(self): - return f"FileSeries({self.values})" diff --git a/src/neptune/types/series/float_series.py b/src/neptune/types/series/float_series.py deleted file mode 100644 index 483f8250c..000000000 --- a/src/neptune/types/series/float_series.py +++ /dev/null @@ -1,131 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
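[Editor's note] Every element handed to FileSeries above is routed through File.create_from, so values convertible to images (PIL images, matplotlib figures) can be passed directly. A minimal sketch with a hypothetical matplotlib figure:

import matplotlib.pyplot as plt
from neptune.types.series import FileSeries

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])

# The figure is wrapped via File.create_from; when steps are given,
# their length must match the number of values.
series = FileSeries(values=[fig], steps=[0])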
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["FloatSeries"] - -import time -from itertools import cycle -from typing import ( - TYPE_CHECKING, - Optional, - Sequence, - TypeVar, - Union, -) - -from neptune.internal.types.stringify_value import extract_if_stringify_value -from neptune.internal.types.utils import is_unsupported_float -from neptune.internal.utils import is_collection -from neptune.internal.warnings import ( - NeptuneUnsupportedValue, - warn_once, -) -from neptune.types.series.series import Series - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class FloatSeries(Series): - def __init__( - self, - values, - min: Optional[Union[float, int]] = None, - max: Optional[Union[float, int]] = None, - unit: Optional[str] = None, - timestamps: Optional[Sequence[float]] = None, - steps: Optional[Sequence[float]] = None, - ): - values = extract_if_stringify_value(values) - - if not is_collection(values): - raise TypeError("`values` is not a collection") - - self._values = [float(value) for value in values] - self._min = min - self._max = max - self._unit = unit - - if steps is None: - filled_steps = cycle([None]) - else: - assert len(values) == len(steps) - filled_steps = steps - - if timestamps is None: - filled_timestamps = cycle([time.time()]) - else: - assert len(values) == len(timestamps) - filled_timestamps = timestamps - - clean_values, self._steps, self._timestamps = self.filter_unsupported_values( - values=values, - steps=filled_steps, - timestamps=filled_timestamps, - filter_by=self.is_unsupported_float_with_warn, - ) - self._values = [float(value) for value in clean_values] - - @property - def steps(self): - return self._steps - - @property - def timestamps(self): - return self._timestamps - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_float_series(self) - - @property - def values(self): - return self._values - - @property - def min(self): - return self._min - - @property - def max(self): - return self._max - - @property - def unit(self): - return self._unit - - def __str__(self): - return "FloatSeries({})".format(str(self.values)) - - def is_unsupported_float_with_warn(self, value): - if is_unsupported_float(value): - warn_once( - message=f"WARNING: A value you're trying to log (`{str(value)}`) will be skipped because " - f"it's a non-standard float value that is not currently supported.", - exception=NeptuneUnsupportedValue, - ) - return False - return True - - def filter_unsupported_values(self, values, steps, timestamps, filter_by): - filtered = [ - (value, step, timestamp) for value, step, timestamp in zip(values, steps, timestamps) if filter_by(value) - ] - return ( - [value for value, _, _ in filtered], - [step for _, step, _ in filtered], - [timestamp for _, _, timestamp in filtered], - ) diff --git a/src/neptune/types/series/series.py b/src/neptune/types/series/series.py deleted file mode 100644 index 675f82985..000000000 --- a/src/neptune/types/series/series.py +++ /dev/null @@ -1,53 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
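[Editor's note] FloatSeries above silently drops non-standard floats instead of raising, and removes the matching steps and timestamps in lockstep. A minimal sketch:

import math
from neptune.types.series import FloatSeries

series = FloatSeries(
    values=[0.5, math.nan, 1.5, math.inf],
    steps=[0, 1, 2, 3],
    timestamps=[10.0, 11.0, 12.0, 13.0],
)
# NaN and inf trigger a one-time NeptuneUnsupportedValue warning and are
# filtered out together with their step and timestamp entries.
print(series.values)       # [0.5, 1.5]
print(list(series.steps))  # [0, 2]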
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["Series"] - -import abc -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class Series(Value): - @abc.abstractmethod - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - pass - - @property - @abc.abstractmethod - def values(self): - pass - - @property - @abc.abstractmethod - def steps(self): - pass - - @property - @abc.abstractmethod - def timestamps(self): - pass - - def __len__(self): - return len(self.values) diff --git a/src/neptune/types/series/series_value.py b/src/neptune/types/series/series_value.py deleted file mode 100644 index 430cf6eee..000000000 --- a/src/neptune/types/series/series_value.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["SeriesValue"] - -from typing import ( - Generic, - TypeVar, -) - -T = TypeVar("T") - - -class SeriesValue(Generic[T]): - def __init__(self, step: float, value: T, timestamp: float): - self._step = step - self._value = value - self._timestamp = timestamp - - @property - def step(self) -> float: - return self._step - - @step.setter - def step(self, step: float): - self._step = step - - @property - def value(self) -> T: - return self._value - - @value.setter - def value(self, value: T): - self._value = value - - @property - def timestamp(self) -> float: - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp: float): - self._timestamp = timestamp diff --git a/src/neptune/types/series/string_series.py b/src/neptune/types/series/string_series.py deleted file mode 100644 index 99b8641f0..000000000 --- a/src/neptune/types/series/string_series.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["StringSeries"] - -import time -from itertools import cycle -from typing import ( - TYPE_CHECKING, - Iterable, - Optional, - Sequence, - TypeVar, - Union, -) - -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import ( - is_collection, - is_stringify_value, -) -from neptune.types.series.series import Series - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - -MAX_STRING_SERIES_VALUE_LENGTH = 1000 - - -class StringSeries(Series): - def __init__( - self, - values: Union[Iterable[str], StringifyValue], - timestamps: Optional[Sequence[float]] = None, - steps: Optional[Sequence[float]] = None, - ): - if is_stringify_value(values): - values = list(map(str, values.value)) - - if not is_collection(values): - raise TypeError("`values` is not a collection") - - self._truncated = any([len(value) > MAX_STRING_SERIES_VALUE_LENGTH for value in values]) - self._values = [value[:MAX_STRING_SERIES_VALUE_LENGTH] for value in values] - - if steps is None: - self._steps = cycle([None]) - else: - assert len(values) == len(steps) - self._steps = steps - - if timestamps is None: - self._timestamps = cycle([time.time()]) - else: - assert len(values) == len(timestamps) - self._timestamps = timestamps - - @property - def steps(self): - return self._steps - - @property - def timestamps(self): - return self._timestamps - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_string_series(self) - - @property - def values(self): - return self._values - - @property - def truncated(self): - """True if any value had to be truncated to `MAX_STRING_SERIES_VALUE_LENGTH`""" - return self._truncated - - def __str__(self): - return "StringSeries({})".format(str(self.values)) diff --git a/src/neptune/types/sets/__init__.py b/src/neptune/types/sets/__init__.py deleted file mode 100644 index 499516a83..000000000 --- a/src/neptune/types/sets/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["StringSet"] - -from .string_set import StringSet diff --git a/src/neptune/types/sets/set.py b/src/neptune/types/sets/set.py deleted file mode 100644 index 15715f206..000000000 --- a/src/neptune/types/sets/set.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
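[Editor's note] StringSeries above caps each value at MAX_STRING_SERIES_VALUE_LENGTH instead of rejecting long strings, and records whether anything was cut. A minimal sketch:

from neptune.types.series import StringSeries

series = StringSeries(values=["short", "x" * 1500])
print(series.truncated)       # True: at least one value exceeded 1000 chars
print(len(series.values[1]))  # 1000: cut at MAX_STRING_SERIES_VALUE_LENGTH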
-# -__all__ = ["Set"] - -import abc -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class Set(Value): - @abc.abstractmethod - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - pass diff --git a/src/neptune/types/sets/string_set.py b/src/neptune/types/sets/string_set.py deleted file mode 100644 index 6e945c2e5..000000000 --- a/src/neptune/types/sets/string_set.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["StringSet"] - -from typing import ( - TYPE_CHECKING, - Iterable, - TypeVar, -) - -from neptune.types.sets.set import Set - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class StringSet(Set): - def __init__(self, values: Iterable[str]): - self.values = set(values) - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - return visitor.visit_string_set(self) - - def __str__(self): - return "StringSet({})".format(str(self.values)) diff --git a/src/neptune/types/type_casting.py b/src/neptune/types/type_casting.py deleted file mode 100644 index bbae8d00a..000000000 --- a/src/neptune/types/type_casting.py +++ /dev/null @@ -1,118 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__all__ = ["cast_value", "cast_value_for_extend"] - -import argparse -from datetime import datetime -from typing import ( - Any, - Collection, - Optional, - Union, -) - -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils import ( - is_bool, - is_dict_like, - is_float, - is_float_like, - is_int, - is_string, - is_stringify_value, -) -from neptune.types import ( - Boolean, - File, - Integer, -) -from neptune.types.atoms.datetime import Datetime -from neptune.types.atoms.float import Float -from neptune.types.atoms.string import String -from neptune.types.namespace import Namespace -from neptune.types.series import ( - FileSeries, - FloatSeries, - StringSeries, -) -from neptune.types.series.series import Series -from neptune.types.value import Value -from neptune.types.value_copy import ValueCopy - - -def cast_value(value: Any) -> Optional[Value]: - from neptune.handler import Handler - - from_stringify_value = False - if is_stringify_value(value): - from_stringify_value, value = True, value.value - - if isinstance(value, Value): - return value - elif isinstance(value, Handler): - return ValueCopy(value) - elif isinstance(value, argparse.Namespace): - return Namespace(vars(value)) - elif File.is_convertable_to_image(value): - return File.as_image(value) - elif File.is_convertable_to_html(value): - return File.as_html(value) - elif is_bool(value): - return Boolean(value) - elif is_int(value): - return Integer(value) - elif is_float(value): - return Float(value) - elif is_string(value): - return String(value) - elif isinstance(value, datetime): - return Datetime(value) - elif is_float_like(value): - return Float(value) - elif is_dict_like(value): - return Namespace(value) - elif from_stringify_value: - return String(str(value)) - - -def cast_value_for_extend( - values: Union[StringifyValue, Namespace, Series, Collection[Any]] -) -> Optional[Union[Series, Namespace]]: - from_stringify_value, original_values = False, None - if is_stringify_value(values): - from_stringify_value, original_values, values = True, values, values.value - - if isinstance(values, Namespace): - return values - elif is_dict_like(values): - return Namespace(values) - elif isinstance(values, Series): - return values - - sample_val = next(iter(values)) - - if isinstance(sample_val, File): - return FileSeries(values=values) - elif File.is_convertable_to_image(sample_val): - return FileSeries(values=values) - elif File.is_convertable_to_html(sample_val): - return FileSeries(values=values) - elif is_string(sample_val): - return StringSeries(values=values) - elif is_float_like(sample_val): - return FloatSeries(values=values) - elif from_stringify_value: - return StringSeries(values=original_values) diff --git a/src/neptune/types/value.py b/src/neptune/types/value.py deleted file mode 100644 index c98affb5d..000000000 --- a/src/neptune/types/value.py +++ /dev/null @@ -1,33 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
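[Editor's note] The order of checks in cast_value above is load-bearing: bool is a subclass of int in Python, so is_bool must run before is_int, and the exact is_float check precedes the looser is_float_like fallback. A minimal sketch of the dispatch:

from datetime import datetime
from neptune.types.type_casting import cast_value

print(cast_value(True))            # Boolean atom (bool is checked before int)
print(cast_value(42))              # Integer(42)
print(cast_value(0.5))             # Float(0.5)
print(cast_value("hi"))            # String(hi)
print(cast_value(datetime.now()))  # Datetime atom
print(cast_value({"a": 1}))        # Namespace({'a': 1})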
-# -__all__ = ["Value"] - -import abc -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -if TYPE_CHECKING: - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -class Value: - @abc.abstractmethod - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - pass diff --git a/src/neptune/types/value_copy.py b/src/neptune/types/value_copy.py deleted file mode 100644 index debee667c..000000000 --- a/src/neptune/types/value_copy.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = ["ValueCopy"] - -from dataclasses import dataclass -from typing import ( - TYPE_CHECKING, - TypeVar, -) - -from neptune.internal.utils.paths import parse_path -from neptune.types.value import Value - -if TYPE_CHECKING: - from neptune.handler import Handler - from neptune.types.value_visitor import ValueVisitor - -Ret = TypeVar("Ret") - - -@dataclass -class ValueCopy(Value): - - source_handler: "Handler" - - def __init__(self, source_handler: "Handler"): - self.source_handler = source_handler - - def accept(self, visitor: "ValueVisitor[Ret]") -> Ret: - source_path = self.source_handler._path - source_attr = self.source_handler._container.get_attribute(source_path) - if source_attr and source_attr.supports_copy: - return visitor.copy_value(source_type=type(source_attr), source_path=parse_path(source_path)) - else: - raise Exception(f"{type(source_attr).__name__} doesn't support copying") - - def __str__(self): - return "Copy({})".format(str(self.source_handler)) diff --git a/src/neptune/types/value_visitor.py b/src/neptune/types/value_visitor.py deleted file mode 100644 index 5c59f41ed..000000000 --- a/src/neptune/types/value_visitor.py +++ /dev/null @@ -1,110 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
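[Editor's note] Value.accept and ValueCopy.accept above form the dispatch half of a visitor pattern; the ValueVisitor interface defined next supplies one visit_* method per concrete type. A minimal sketch of a concrete visitor (two methods shown; since ValueVisitor derives from Generic rather than abc.ABC, the omitted methods only fail if actually called):

from neptune.types import Float, String
from neptune.types.value_visitor import ValueVisitor

class DescribeVisitor(ValueVisitor[str]):
    def visit_float(self, value: Float) -> str:
        return f"float holding {value.value}"

    def visit_string(self, value: String) -> str:
        return f"string holding {value.value!r}"

    # ...the remaining visit_* methods are omitted for brevity.

print(DescribeVisitor().visit(Float(0.5)))   # float holding 0.5
print(DescribeVisitor().visit(String("a")))  # string holding 'a'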
-# -__all__ = ["ValueVisitor"] - -import abc -from typing import ( - Generic, - List, - Type, - TypeVar, -) - -from neptune.attributes.attribute import Attribute -from neptune.types import ( - Artifact, - Boolean, - Datetime, - File, - FileSeries, - FileSet, - Float, - FloatSeries, - GitRef, - Integer, - String, - StringSeries, - StringSet, -) -from neptune.types.namespace import Namespace -from neptune.types.value import Value - -Ret = TypeVar("Ret") - - -class ValueVisitor(Generic[Ret]): - def visit(self, value: Value) -> Ret: - return value.accept(self) - - @abc.abstractmethod - def visit_float(self, value: Float) -> Ret: - pass - - @abc.abstractmethod - def visit_integer(self, value: Integer) -> Ret: - pass - - @abc.abstractmethod - def visit_boolean(self, value: Boolean) -> Ret: - pass - - @abc.abstractmethod - def visit_string(self, value: String) -> Ret: - pass - - @abc.abstractmethod - def visit_datetime(self, value: Datetime) -> Ret: - pass - - @abc.abstractmethod - def visit_artifact(self, value: Artifact) -> Ret: - pass - - @abc.abstractmethod - def visit_file(self, value: File) -> Ret: - pass - - @abc.abstractmethod - def visit_file_set(self, value: FileSet) -> Ret: - pass - - @abc.abstractmethod - def visit_float_series(self, value: FloatSeries) -> Ret: - pass - - @abc.abstractmethod - def visit_string_series(self, value: StringSeries) -> Ret: - pass - - @abc.abstractmethod - def visit_image_series(self, value: FileSeries) -> Ret: - pass - - @abc.abstractmethod - def visit_string_set(self, value: StringSet) -> Ret: - pass - - @abc.abstractmethod - def visit_git_ref(self, value: GitRef) -> Ret: - pass - - @abc.abstractmethod - def visit_namespace(self, value: Namespace) -> Ret: - pass - - @abc.abstractmethod - def copy_value(self, source_type: Type[Attribute], source_path: List[str]) -> Ret: - pass diff --git a/src/neptune/typing.py b/src/neptune/typing.py deleted file mode 100644 index fe3eff16b..000000000 --- a/src/neptune/typing.py +++ /dev/null @@ -1,87 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "SupportsNamespaces", - "ProgressBarCallback", - "ProgressBarType", -] - -import abc -import contextlib -from typing import ( - Any, - Optional, - Type, - Union, -) - -from typing_extensions import TypeAlias - -from neptune.objects.abstract import SupportsNamespaces - - -class ProgressBarCallback(contextlib.AbstractContextManager): - """Abstract base class for progress bar callbacks. - - You can use this class to implement your own progress bar callback that will be invoked in table fetching methods: - - - `fetch_runs_table()` - - `fetch_models_table()` - - `fetch_model_versions_table()` - - Example using `click`: - >>> from typing import Any, Optional, Type - >>> from types import TracebackType - >>> from neptune.typing import ProgressBarCallback - >>> class ClickProgressBar(ProgressBarCallback): - ... def __init__(self, *, description: Optional[str] = None, **_: Any) -> None: - ... 
super().__init__() - ... from click import progressbar - ... - ... self._progress_bar = progressbar(iterable=None, length=1, label=description) - ... - ... def update(self, *, by: int, total: Optional[int] = None) -> None: - ... if total: - ... self._progress_bar.length = total - ... self._progress_bar.update(by) - ... - ... def __enter__(self) -> "ClickProgressBar": - ... self._progress_bar.__enter__() - ... return self - ... - ... def __exit__( - ... self, - ... exc_type: Optional[Type[BaseException]], - ... exc_val: Optional[BaseException], - ... exc_tb: Optional[TracebackType], - ... ) -> None: - ... self._progress_bar.__exit__(exc_type, exc_val, exc_tb) - >>> from neptune import init_project - >>> with init_project() as project: - ... project.fetch_runs_table(progress_bar=ClickProgressBar) - ... project.fetch_models_table(progress_bar=ClickProgressBar) - - IMPORTANT: Pass a type, not an instance to the `progress_bar` argument. - That is, `ClickProgressBar`, not `ClickProgressBar()`. - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - - @abc.abstractmethod - def update(self, *, by: int, total: Optional[int] = None) -> None: ... - - -ProgressBarType: TypeAlias = Union[bool, Type[ProgressBarCallback]] diff --git a/src/neptune/utils.py b/src/neptune/utils.py deleted file mode 100644 index 7bc92cf27..000000000 --- a/src/neptune/utils.py +++ /dev/null @@ -1,138 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Utility functions to support ML metadata logging with neptune.ai.""" -__all__ = [ - "stringify_unsupported", - "stop_synchronization_callback", - "TqdmProgressBar", - "NullProgressBar", -] - -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Mapping, - MutableMapping, - Optional, - Type, - Union, -) - -from neptune.internal.init.parameters import DEFAULT_STOP_TIMEOUT -from neptune.internal.types.stringify_value import StringifyValue -from neptune.internal.utils.logger import get_logger -from neptune.typing import ProgressBarCallback - -if TYPE_CHECKING: - from neptune.objects.neptune_object import NeptuneObject - - -logger = get_logger() - - -def stringify_unsupported(value: Any) -> Union[StringifyValue, Mapping]: - """Helper function that converts unsupported values in a collection or dictionary to strings. - - Args: - value (Any): A dictionary with values or a collection - - Example: - >>> import neptune - >>> run = neptune.init_run() - >>> complex_dict = {"tuple": ("hi", 1), "metric": 0.87} - >>> run["complex_dict"] = complex_dict - >>> # (as of 1.0.0) error - tuple is not a supported type - ... 
from neptune.utils import stringify_unsupported - >>> run["complex_dict"] = stringify_unsupported(complex_dict) - """ - if isinstance(value, MutableMapping): - return {str(k): stringify_unsupported(v) for k, v in value.items()} - - return StringifyValue(value=value) - - -def stop_synchronization_callback(neptune_object: "NeptuneObject") -> None: - """Default callback function that stops a Neptune object's synchronization with the server. - - Args: - neptune_object: A Neptune object (Run, Model, ModelVersion, or Project) to be stopped. - - Example: - >>> import neptune - >>> from neptune.utils import stop_synchronization_callback - >>> run = neptune.init_run( - ... async_no_progress_callback = stop_synchronization_callback - ... ) - - For more information, see: - https://docs.neptune.ai/api/utils/stop_synchronization_callback/ - """ - logger.error( - "Threshold for disrupted synchronization exceeded. Stopping the synchronization using the default callback." - ) - neptune_object.stop(seconds=DEFAULT_STOP_TIMEOUT) - - -class NullProgressBar(ProgressBarCallback): - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - pass - - def update(self, *, by: int, total: Optional[int] = None) -> None: - pass - - -class TqdmProgressBar(ProgressBarCallback): - def __init__( - self, - *args: Any, - description: Optional[str] = None, - unit: Optional[str] = None, - unit_scale: bool = False, - **kwargs: Any, - ) -> None: - super().__init__(*args, **kwargs) - - from tqdm.auto import tqdm - - unit = unit if unit else "" - - self._progress_bar = tqdm(desc=description, unit=unit, unit_scale=unit_scale, **kwargs) - - def __enter__(self) -> "TqdmProgressBar": - self._progress_bar.__enter__() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - self._progress_bar.__exit__(exc_type, exc_val, exc_tb) - - def update(self, *, by: int, total: Optional[int] = None) -> None: - if total: - self._progress_bar.total = total - self._progress_bar.update(by) diff --git a/src/neptune/vendor/__init__.py b/src/neptune/vendor/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/neptune/vendor/lib_programname.py b/src/neptune/vendor/lib_programname.py deleted file mode 100644 index 4371a71a2..000000000 --- a/src/neptune/vendor/lib_programname.py +++ /dev/null @@ -1,177 +0,0 @@ -# MIT License -# -# Copyright (c) 1990-2022 Robert Nowotny -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
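[Editor's note] TqdmProgressBar above is the stock ProgressBarCallback implementation; as the typing.py docstring notes, fetching methods expect the class itself, not an instance. A sketch, with a hypothetical workspace/project name:

import neptune
from neptune.utils import TqdmProgressBar

with neptune.init_project(project="my-workspace/my-project") as project:
    # The fetcher instantiates the callback and calls
    # update(by=..., total=...) as result pages arrive.
    runs_table = project.fetch_runs_table(progress_bar=TqdmProgressBar)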
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# flake8: noqa - -import pathlib -import sys - -import __main__ # noqa - -__all__ = ["empty_path", "get_path_executed_script"] - -empty_path = pathlib.Path() - - -def get_path_executed_script() -> pathlib.Path: - """ - getting the full path of the program from which a Python module is running - - >>> ### TEST get it via __main__.__file__ - >>> # Setup - >>> # force __main__.__file__ valid - >>> save_main_file = str(__main__.__file__) - >>> __main__.__file__ = __file__ - - >>> # Test via __main__.__file__ - >>> assert get_path_executed_script() == pathlib.Path(__file__).resolve() - - - >>> ### TEST get it via sys.argv - >>> # Setup - >>> # force __main__.__file__ invalid - >>> __main__.__file__ = str((pathlib.Path(__file__).parent / 'invalid_file.py')) # .resolve() seems not to work on a non existing file in python 3.5 - - >>> # force sys.argv valid - >>> save_sys_argv = list(sys.argv) - >>> valid_path = str((pathlib.Path(__file__).resolve())) - >>> sys.argv = [valid_path] - - >>> # Test via sys.argv - >>> assert get_path_executed_script() == pathlib.Path(__file__).resolve() - - - >>> ### TEST get it via stack - >>> # Setup - >>> # force sys.argv invalid - >>> invalid_path = str((pathlib.Path(__file__).parent / 'invalid_file.py')) # .resolve() seems not to work on a non existing file in python 3.5 - >>> sys.argv = [invalid_path] - - - >>> assert get_path_executed_script() - - >>> # teardown - >>> __main__.__file__ = save_main_file - >>> sys.argv = list(save_sys_argv) - - """ - - # try to get it from __main__.__file__ - does not work under pytest, doctest - path_candidate = get_fullpath_from_main_file() - if path_candidate != empty_path: - return path_candidate - - # try to get it from sys_argv - does not work when loaded from uwsgi, works in eclipse and pydev - path_candidate = get_fullpath_from_sys_argv() - if path_candidate != empty_path: - return path_candidate - - return empty_path - - -def get_fullpath_from_main_file() -> pathlib.Path: - """try to get it from __main__.__file__ - does not work under pytest, doctest - - >>> # test no attrib __main__.__file__ - >>> save_main_file = str(__main__.__file__) - >>> delattr(__main__, '__file__') - >>> assert get_fullpath_from_main_file() == empty_path - >>> setattr(__main__, '__file__', save_main_file) - - """ - if not hasattr(sys.modules["__main__"], "__file__"): - return empty_path - - arg_string = str(sys.modules["__main__"].__file__) - valid_executable_path = get_valid_executable_path_or_empty_path(arg_string) - return valid_executable_path - - -def get_fullpath_from_sys_argv() -> pathlib.Path: - """try to get it from sys_argv - does not work when loaded from uwsgi, works in eclipse and pydev - - >>> # force test invalid sys.path - >>> save_sys_argv = list(sys.argv) - >>> invalid_path = str((pathlib.Path(__file__).parent / 'invalid_file.py')) # .resolve() seems not to work on a non existing file in python 3.5 - >>> sys.argv = [invalid_path] - >>> assert get_fullpath_from_sys_argv() == pathlib.Path() - >>> sys.argv = list(save_sys_argv) - - >>> # force test valid sys.path - >>> save_sys_path = list(sys.argv) - >>> valid_path = str((pathlib.Path(__file__).resolve())) - >>> sys.argv = [valid_path] - >>> assert get_fullpath_from_sys_argv() == pathlib.Path(valid_path) - >>> 
sys.argv = list(save_sys_argv) - - - """ - - for arg_string in sys.argv: - valid_executable_path = get_valid_executable_path_or_empty_path(arg_string) - if valid_executable_path != empty_path: - return valid_executable_path - return empty_path - - -def get_valid_executable_path_or_empty_path(arg_string: str) -> pathlib.Path: - arg_string = remove_doctest_and_docrunner_parameters(arg_string) - arg_string = add_python_extension_if_not_there(arg_string) - path = pathlib.Path(arg_string) - try: - if path.is_file(): - path = path.resolve() # .resolve does not work on a non existing file in python 3.5 - return path - else: - return empty_path - except Exception as e: - return empty_path - - -def remove_doctest_and_docrunner_parameters(arg_string: str) -> str: - """ - >>> # Setup - >>> arg_string_with_parameter = __file__ + '::::::some docrunner parameter' - >>> arg_string_without_parameter = __file__ - - >>> # Test with and without docrunner parameters - >>> assert remove_doctest_and_docrunner_parameters(arg_string_with_parameter) == __file__ - >>> assert remove_doctest_and_docrunner_parameters(arg_string_without_parameter) == __file__ - """ - path = arg_string.split("::", 1)[0] - return path - - -def add_python_extension_if_not_there(arg_string: str) -> str: - """ - >>> # Setup - >>> arg_string_with_py = __file__ - >>> arg_string_without_py = __file__.rsplit('.py',1)[0] - - >>> # Test with and without .py suffix - >>> assert add_python_extension_if_not_there(arg_string_with_py) == __file__ - >>> assert add_python_extension_if_not_there(arg_string_without_py) == __file__ - - """ - - if not arg_string.endswith(".py"): - arg_string = arg_string + ".py" - return arg_string diff --git a/src/neptune/vendor/pynvml.py b/src/neptune/vendor/pynvml.py deleted file mode 100644 index 422eaabc0..000000000 --- a/src/neptune/vendor/pynvml.py +++ /dev/null @@ -1,2312 +0,0 @@ -# ============================================================================ # -# Copyright (c) 2011-2015, NVIDIA Corporation. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the NVIDIA Corporation nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -# THE POSSIBILITY OF SUCH DAMAGE. 
-# ============================================================================ # -# flake8: noqa - -""" -Python bindings for the NVML library. -""" - -import os -import string -import sys -import threading -from ctypes import * -from ctypes.util import find_library - -## ========================================================================== ## -## ## -## CType Mappings ## -## ## -## ========================================================================== ## - -## Enums -_nvmlEnableState_t = c_uint -NVML_FEATURE_DISABLED = 0 -NVML_FEATURE_ENABLED = 1 - -_nvmlBrandType_t = c_uint -NVML_BRAND_UNKNOWN = 0 -NVML_BRAND_QUADRO = 1 -NVML_BRAND_TESLA = 2 -NVML_BRAND_NVS = 3 -NVML_BRAND_GRID = 4 -NVML_BRAND_GEFORCE = 5 -NVML_BRAND_COUNT = 6 - -_nvmlTemperatureThresholds_t = c_uint -NVML_TEMPERATURE_THRESHOLD_SHUTDOWN = 0 -NVML_TEMPERATURE_THRESHOLD_SLOWDOWN = 1 -NVML_TEMPERATURE_THRESHOLD_COUNT = 1 - -_nvmlTemperatureSensors_t = c_uint -NVML_TEMPERATURE_GPU = 0 -NVML_TEMPERATURE_COUNT = 1 - -_nvmlComputeMode_t = c_uint -NVML_COMPUTEMODE_DEFAULT = 0 -NVML_COMPUTEMODE_EXCLUSIVE_THREAD = 1 -NVML_COMPUTEMODE_PROHIBITED = 2 -NVML_COMPUTEMODE_EXCLUSIVE_PROCESS = 3 -NVML_COMPUTEMODE_COUNT = 4 - -_nvmlMemoryLocation_t = c_uint -NVML_MEMORY_LOCATION_L1_CACHE = 0 -NVML_MEMORY_LOCATION_L2_CACHE = 1 -NVML_MEMORY_LOCATION_DEVICE_MEMORY = 2 -NVML_MEMORY_LOCATION_REGISTER_FILE = 3 -NVML_MEMORY_LOCATION_TEXTURE_MEMORY = 4 -NVML_MEMORY_LOCATION_COUNT = 5 - -# These are deprecated, instead use _nvmlMemoryErrorType_t -_nvmlEccBitType_t = c_uint -NVML_SINGLE_BIT_ECC = 0 -NVML_DOUBLE_BIT_ECC = 1 -NVML_ECC_ERROR_TYPE_COUNT = 2 - -_nvmlEccCounterType_t = c_uint -NVML_VOLATILE_ECC = 0 -NVML_AGGREGATE_ECC = 1 -NVML_ECC_COUNTER_TYPE_COUNT = 2 - -_nvmlMemoryErrorType_t = c_uint -NVML_MEMORY_ERROR_TYPE_CORRECTED = 0 -NVML_MEMORY_ERROR_TYPE_UNCORRECTED = 1 -NVML_MEMORY_ERROR_TYPE_COUNT = 2 - -_nvmlClockType_t = c_uint -NVML_CLOCK_GRAPHICS = 0 -NVML_CLOCK_SM = 1 -NVML_CLOCK_MEM = 2 -NVML_CLOCK_COUNT = 3 - -_nvmlDriverModel_t = c_uint -NVML_DRIVER_WDDM = 0 -NVML_DRIVER_WDM = 1 - -_nvmlPstates_t = c_uint -NVML_PSTATE_0 = 0 -NVML_PSTATE_1 = 1 -NVML_PSTATE_2 = 2 -NVML_PSTATE_3 = 3 -NVML_PSTATE_4 = 4 -NVML_PSTATE_5 = 5 -NVML_PSTATE_6 = 6 -NVML_PSTATE_7 = 7 -NVML_PSTATE_8 = 8 -NVML_PSTATE_9 = 9 -NVML_PSTATE_10 = 10 -NVML_PSTATE_11 = 11 -NVML_PSTATE_12 = 12 -NVML_PSTATE_13 = 13 -NVML_PSTATE_14 = 14 -NVML_PSTATE_15 = 15 -NVML_PSTATE_UNKNOWN = 32 - -_nvmlInforomObject_t = c_uint -NVML_INFOROM_OEM = 0 -NVML_INFOROM_ECC = 1 -NVML_INFOROM_POWER = 2 -NVML_INFOROM_COUNT = 3 - -_nvmlReturn_t = c_uint -NVML_SUCCESS = 0 -NVML_ERROR_UNINITIALIZED = 1 -NVML_ERROR_INVALID_ARGUMENT = 2 -NVML_ERROR_NOT_SUPPORTED = 3 -NVML_ERROR_NO_PERMISSION = 4 -NVML_ERROR_ALREADY_INITIALIZED = 5 -NVML_ERROR_NOT_FOUND = 6 -NVML_ERROR_INSUFFICIENT_SIZE = 7 -NVML_ERROR_INSUFFICIENT_POWER = 8 -NVML_ERROR_DRIVER_NOT_LOADED = 9 -NVML_ERROR_TIMEOUT = 10 -NVML_ERROR_IRQ_ISSUE = 11 -NVML_ERROR_LIBRARY_NOT_FOUND = 12 -NVML_ERROR_FUNCTION_NOT_FOUND = 13 -NVML_ERROR_CORRUPTED_INFOROM = 14 -NVML_ERROR_GPU_IS_LOST = 15 -NVML_ERROR_RESET_REQUIRED = 16 -NVML_ERROR_OPERATING_SYSTEM = 17 -NVML_ERROR_LIB_RM_VERSION_MISMATCH = 18 -NVML_ERROR_UNKNOWN = 999 - -_nvmlFanState_t = c_uint -NVML_FAN_NORMAL = 0 -NVML_FAN_FAILED = 1 - -_nvmlLedColor_t = c_uint -NVML_LED_COLOR_GREEN = 0 -NVML_LED_COLOR_AMBER = 1 - -_nvmlGpuOperationMode_t = c_uint -NVML_GOM_ALL_ON = 0 -NVML_GOM_COMPUTE = 1 -NVML_GOM_LOW_DP = 2 - -_nvmlPageRetirementCause_t = c_uint 
-NVML_PAGE_RETIREMENT_CAUSE_DOUBLE_BIT_ECC_ERROR = 0 -NVML_PAGE_RETIREMENT_CAUSE_MULTIPLE_SINGLE_BIT_ECC_ERRORS = 1 -NVML_PAGE_RETIREMENT_CAUSE_COUNT = 2 - -_nvmlRestrictedAPI_t = c_uint -NVML_RESTRICTED_API_SET_APPLICATION_CLOCKS = 0 -NVML_RESTRICTED_API_SET_AUTO_BOOSTED_CLOCKS = 1 -NVML_RESTRICTED_API_COUNT = 2 - -_nvmlBridgeChipType_t = c_uint -NVML_BRIDGE_CHIP_PLX = 0 -NVML_BRIDGE_CHIP_BRO4 = 1 -NVML_MAX_PHYSICAL_BRIDGE = 128 - -_nvmlValueType_t = c_uint -NVML_VALUE_TYPE_DOUBLE = 0 -NVML_VALUE_TYPE_UNSIGNED_INT = 1 -NVML_VALUE_TYPE_UNSIGNED_LONG = 2 -NVML_VALUE_TYPE_UNSIGNED_LONG_LONG = 3 -NVML_VALUE_TYPE_COUNT = 4 - -_nvmlPerfPolicyType_t = c_uint -NVML_PERF_POLICY_POWER = 0 -NVML_PERF_POLICY_THERMAL = 1 -NVML_PERF_POLICY_COUNT = 2 - -_nvmlSamplingType_t = c_uint -NVML_TOTAL_POWER_SAMPLES = 0 -NVML_GPU_UTILIZATION_SAMPLES = 1 -NVML_MEMORY_UTILIZATION_SAMPLES = 2 -NVML_ENC_UTILIZATION_SAMPLES = 3 -NVML_DEC_UTILIZATION_SAMPLES = 4 -NVML_PROCESSOR_CLK_SAMPLES = 5 -NVML_MEMORY_CLK_SAMPLES = 6 -NVML_SAMPLINGTYPE_COUNT = 7 - -_nvmlPcieUtilCounter_t = c_uint -NVML_PCIE_UTIL_TX_BYTES = 0 -NVML_PCIE_UTIL_RX_BYTES = 1 -NVML_PCIE_UTIL_COUNT = 2 - -_nvmlGpuTopologyLevel_t = c_uint -NVML_TOPOLOGY_INTERNAL = 0 -NVML_TOPOLOGY_SINGLE = 10 -NVML_TOPOLOGY_MULTIPLE = 20 -NVML_TOPOLOGY_HOSTBRIDGE = 30 -NVML_TOPOLOGY_CPU = 40 -NVML_TOPOLOGY_SYSTEM = 50 - -_nvmlNvLinkCapability_t = c_uint -NVML_NVLINK_CAP_P2P_SUPPORTED = 0 -NVML_NVLINK_CAP_SYSMEM_ACCESS = 1 -NVML_NVLINK_CAP_P2P_ATOMICS = 2 -NVML_NVLINK_CAP_SYSMEM_ATOMICS = 3 -NVML_NVLINK_CAP_SLI_BRIDGE = 4 -NVML_NVLINK_CAP_VALID = 5 -NVML_NVLINK_CAP_COUNT = 6 - -_nvmlNvLinkErrorCounter_t = c_uint -NVML_NVLINK_ERROR_DL_REPLAY = 0 -NVML_NVLINK_ERROR_DL_RECOVERY = 1 -NVML_NVLINK_ERROR_DL_CRC_FLIT = 2 -NVML_NVLINK_ERROR_DL_CRC_DATA = 3 -NVML_NVLINK_ERROR_COUNT = 4 - -_nvmlNvLinkUtilizationCountPktTypes_t = c_uint -NVML_NVLINK_COUNTER_PKTFILTER_NOP = 0x1 -NVML_NVLINK_COUNTER_PKTFILTER_READ = 0x2 -NVML_NVLINK_COUNTER_PKTFILTER_WRITE = 0x4 -NVML_NVLINK_COUNTER_PKTFILTER_RATOM = 0x8 -NVML_NVLINK_COUNTER_PKTFILTER_NRATOM = 0x10 -NVML_NVLINK_COUNTER_PKTFILTER_FLUSH = 0x20 -NVML_NVLINK_COUNTER_PKTFILTER_RESPDATA = 0x40 -NVML_NVLINK_COUNTER_PKTFILTER_RESPNODATA = 0x80 -NVML_NVLINK_COUNTER_PKTFILTER_ALL = 0xFF - -_nvmlNvLinkUtilizationCountUnits_t = c_uint -NVML_NVLINK_COUNTER_UNIT_CYCLES = 0 -NVML_NVLINK_COUNTER_UNIT_PACKETS = 1 -NVML_NVLINK_COUNTER_UNIT_BYTES = 2 -NVML_NVLINK_COUNTER_UNIT_COUNT = 3 - -_nvmlNvLinkUtilizationControl_t = c_int - -# C preprocessor defined values -nvmlFlagDefault = 0 -nvmlFlagForce = 1 - -# buffer size -NVML_DEVICE_INFOROM_VERSION_BUFFER_SIZE = 16 -NVML_DEVICE_UUID_BUFFER_SIZE = 80 -NVML_SYSTEM_DRIVER_VERSION_BUFFER_SIZE = 81 -NVML_SYSTEM_NVML_VERSION_BUFFER_SIZE = 80 -NVML_DEVICE_NAME_BUFFER_SIZE = 64 -NVML_DEVICE_SERIAL_BUFFER_SIZE = 30 -NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE = 32 -NVML_DEVICE_PCI_BUS_ID_BUFFER_SIZE = 32 -NVML_DEVICE_PCI_BUS_ID_BUFFER_V2_SIZE = 16 - -# NvLink -NVML_NVLINK_MAX_LINKS = 6 - -NVML_VALUE_NOT_AVAILABLE_ulonglong = c_ulonglong(-1) -NVML_VALUE_NOT_AVAILABLE_uint = c_uint(-1) - -## ========================================================================== ## -## ## -## Library Loading ## -## ## -## ========================================================================== ## - -nvml_lib = None -lib_load_lock = threading.Lock() -nvml_lib_refcount = 0 # Incremented on each nvmlInit and decremented on nvmlShutdown - - -## ========================================================================== ## -## ## -## 
Error-checking Functionality ## -## ## -## ========================================================================== ## - - -class NVMLError(Exception): - _val_mapping = dict() - # List of currently known error codes - _errcode_to_string = { - NVML_ERROR_UNINITIALIZED: "Uninitialized", - NVML_ERROR_INVALID_ARGUMENT: "Invalid Argument", - NVML_ERROR_NOT_SUPPORTED: "Not Supported", - NVML_ERROR_NO_PERMISSION: "Insufficient Permissions", - NVML_ERROR_ALREADY_INITIALIZED: "Already Initialized", - NVML_ERROR_NOT_FOUND: "Not Found", - NVML_ERROR_INSUFFICIENT_SIZE: "Insufficient Size", - NVML_ERROR_INSUFFICIENT_POWER: "Insufficient External Power", - NVML_ERROR_DRIVER_NOT_LOADED: "Driver Not Loaded", - NVML_ERROR_TIMEOUT: "Timeout", - NVML_ERROR_IRQ_ISSUE: "Interrupt Request Issue", - NVML_ERROR_LIBRARY_NOT_FOUND: "NVML Shared Library Not Found", - NVML_ERROR_FUNCTION_NOT_FOUND: "Function Not Found", - NVML_ERROR_CORRUPTED_INFOROM: "Corrupted infoROM", - NVML_ERROR_GPU_IS_LOST: "GPU is lost", - NVML_ERROR_RESET_REQUIRED: "GPU requires restart", - NVML_ERROR_OPERATING_SYSTEM: "The operating system has blocked the request.", - NVML_ERROR_LIB_RM_VERSION_MISMATCH: "RM has detected an NVML/RM version mismatch.", - NVML_ERROR_UNKNOWN: "Unknown Error", - } - - def __new__(typ, value): - """ - Maps value to a proper subclass of NVMLError. - See _extract_errors_as_classes function for more details - """ - if typ == NVMLError: - typ = NVMLError._val_mapping.get(value, typ) - obj = Exception.__new__(typ) - obj.value = value - return obj - - def __str__(self): - try: - if self.value not in NVMLError._errcode_to_string: - NVMLError._errcode_to_string[self.value] = str(nvml_error_string(self.value)) - return NVMLError._errcode_to_string[self.value] - except NVMLError_Uninitialized: - return "NVML Error with code %d" % self.value - - def __eq__(self, other): - return self.value == other.value - - -def _extract_errors_as_classes(): - """ - Generates a hierarchy of classes on top of NVMLError class. - - Each NVML Error gets a new NVMLError subclass. This way try,except blocks can filter appropriate - exceptions more easily. - - NVMLError is a parent class. Each NVML_ERROR_* gets it's own subclass. - e.g. NVML_ERROR_ALREADY_INITIALIZED will be turned into NVMLError_AlreadyInitialized - """ - this_module = sys.modules[__name__] - nvmlErrorsNames = filter(lambda x: x.startswith("NVML_ERROR_"), dir(this_module)) - for err_name in nvmlErrorsNames: - # e.g. 
Turn NVML_ERROR_ALREADY_INITIALIZED into NVMLError_AlreadyInitialized - class_name = "NVMLError_" + string.capwords(err_name.replace("NVML_ERROR_", ""), "_").replace("_", "") - err_val = getattr(this_module, err_name) - - def gen_new(val): - def new(typ): - obj = NVMLError.__new__(typ, val) - return obj - - return new - - new_error_class = type(class_name, (NVMLError,), {"__new__": gen_new(err_val)}) - new_error_class.__module__ = __name__ - setattr(this_module, class_name, new_error_class) - NVMLError._val_mapping[err_val] = new_error_class - - -_extract_errors_as_classes() - - -def check_return(ret): - if ret != NVML_SUCCESS: - raise NVMLError(ret) - return ret - - -## ========================================================================== ## -## ## -## Library Function Access ## -## ## -## ========================================================================== ## - -_func_pointer_cache = dict() # function pointers are cached to prevent unnecessary lib_load_lock locking - - -def get_func_pointer(name): - global nvml_lib - - if name in _func_pointer_cache: - return _func_pointer_cache[name] - - lib_load_lock.acquire() - try: - # ensure library was loaded - if nvml_lib == None: - raise NVMLError(NVML_ERROR_UNINITIALIZED) - try: - _func_pointer_cache[name] = getattr(nvml_lib, name) - return _func_pointer_cache[name] - except AttributeError: - raise NVMLError(NVML_ERROR_FUNCTION_NOT_FOUND) - finally: - # lock is always freed - lib_load_lock.release() - - -## Alternative object -# Allows the object to be printed -# Allows mismatched types to be assigned -# - like None when the Structure variant requires c_uint -class FriendlyObject(object): - def __init__(self, dictionary): - for x in dictionary: - setattr(self, x, dictionary[x]) - - def __str__(self): - return self.__dict__.__str__() - - -def struct_to_friendly_object(struct): - d = {} - for x in struct._fields_: - key = x[0] - value = getattr(struct, key) - d[key] = value - obj = FriendlyObject(d) - return obj - - -# pack the object so it can be passed to the NVML library -def friendly_object_to_struct(obj, model): - for x in model._fields_: - key = x[0] - value = obj.__dict__[key] - setattr(model, key, value) - return model - - -## Unit structures -class struct_c_nvmlUnit_t(Structure): - pass # opaque handle - - -c_nvmlUnit_t = POINTER(struct_c_nvmlUnit_t) - - -class PrintableStructure(Structure): - """ - Abstract class that produces nicer __str__ output than ctypes.Structure. - e.g. instead of: - >>> print str(obj) - - this class will print - class_name(field_name: formatted_value, field_name: formatted_value) - - _fmt_ dictionary of -> - e.g. class that has _field_ 'hex_value', c_uint could be formatted with - _fmt_ = {"hex_value" : "%08X"} - to produce nicer output. - Default fomratting string for all fields can be set with key "" like: - _fmt_ = {"" : "%d MHz"} # e.g all values are numbers in MHz. - If not set it's assumed to be just "%s" - - Exact format of returned str from this class is subject to change in the future. 
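[Editor's note] _extract_errors_as_classes above mints one NVMLError subclass per NVML_ERROR_* constant at import time, and NVMLError.__new__ remaps a generic NVMLError(code) onto the matching subclass, so callers can catch specific failure modes. A minimal sketch of the resulting behavior (imports refer to the vendored module as it stood before this removal):

from neptune.vendor.pynvml import (
    NVML_ERROR_NOT_SUPPORTED,
    NVMLError,
    NVMLError_NotSupported,
    check_return,
)

try:
    check_return(NVML_ERROR_NOT_SUPPORTED)  # raises NVMLError_NotSupported
except NVMLError_NotSupported:
    pass  # feature missing on this GPU; treat as optional
except NVMLError as err:
    raise RuntimeError(f"unexpected NVML failure: {err}")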
- """ - - _fmt_ = {} - - def __str__(self): - result = [] - for x in self._fields_: - key = x[0] - value = getattr(self, key) - fmt = "%s" - if key in self._fmt_: - fmt = self._fmt_[key] - elif "" in self._fmt_: - fmt = self._fmt_[""] - result.append(("%s: " + fmt) % (key, value)) - return self.__class__.__name__ + "(" + string.join(result, ", ") + ")" - - -## ========================================================================== ## -## ## -## C-Type Class Definitions ## -## ## -## ========================================================================== ## - - -class c_nvmlUnitInfo_t(PrintableStructure): - _fields_ = [ - ("name", c_char * 96), - ("id", c_char * 96), - ("serial", c_char * 96), - ("firmwareVersion", c_char * 96), - ] - - -class c_nvmlLedState_t(PrintableStructure): - _fields_ = [ - ("cause", c_char * 256), - ("color", _nvmlLedColor_t), - ] - - -class c_nvmlPSUInfo_t(PrintableStructure): - _fields_ = [ - ("state", c_char * 256), - ("current", c_uint), - ("voltage", c_uint), - ("power", c_uint), - ] - - -class c_nvmlUnitFanInfo_t(PrintableStructure): - _fields_ = [ - ("speed", c_uint), - ("state", _nvmlFanState_t), - ] - - -class c_nvmlUnitFanSpeeds_t(PrintableStructure): - _fields_ = [("fans", c_nvmlUnitFanInfo_t * 24), ("count", c_uint)] - - -## Device structures -class struct_c_nvmlDevice_t(Structure): - pass # opaque handle - - -c_nvmlDevice_t = POINTER(struct_c_nvmlDevice_t) - - -class nvmlPciInfo_t(PrintableStructure): - _fields_ = [ - ("busId", c_char * 16), - ("domain", c_uint), - ("bus", c_uint), - ("device", c_uint), - ("pciDeviceId", c_uint), - # Added in 2.285 - ("pciSubSystemId", c_uint), - ("reserved0", c_uint), - ("reserved1", c_uint), - ("reserved2", c_uint), - ("reserved3", c_uint), - ] - _fmt_ = { - "domain": "0x%04X", - "bus": "0x%02X", - "device": "0x%02X", - "pciDeviceId": "0x%08X", - "pciSubSystemId": "0x%08X", - } - - -class c_nvmlMemory_t(PrintableStructure): - _fields_ = [ - ("total", c_ulonglong), - ("free", c_ulonglong), - ("used", c_ulonglong), - ] - _fmt_ = {"": "%d B"} - - -class c_nvmlBAR1Memory_t(PrintableStructure): - _fields_ = [ - ("bar1Total", c_ulonglong), - ("bar1Free", c_ulonglong), - ("bar1Used", c_ulonglong), - ] - _fmt_ = {"": "%d B"} - - -# On Windows with the WDDM driver, usedGpuMemory is reported as None -# Code that processes this structure should check for None, I.E. 
-# -# if (info.usedGpuMemory == None): -# # TODO handle the error -# pass -# else: -# print("Using %d MiB of memory" % (info.usedGpuMemory / 1024 / 1024)) -# -# See NVML documentation for more information -class c_nvmlProcessInfo_t(PrintableStructure): - _fields_ = [ - ("pid", c_uint), - ("usedGpuMemory", c_ulonglong), - ] - _fmt_ = {"usedGpuMemory": "%d B"} - - -class c_nvmlBridgeChipInfo_t(PrintableStructure): - _fields_ = [ - ("type", _nvmlBridgeChipType_t), - ("fwVersion", c_uint), - ] - - -class c_nvmlBridgeChipHierarchy_t(PrintableStructure): - _fields_ = [ - ("bridgeCount", c_uint), - ("bridgeChipInfo", c_nvmlBridgeChipInfo_t * 128), - ] - - -class c_nvmlEccErrorCounts_t(PrintableStructure): - _fields_ = [ - ("l1Cache", c_ulonglong), - ("l2Cache", c_ulonglong), - ("deviceMemory", c_ulonglong), - ("registerFile", c_ulonglong), - ] - - -class c_nvmlUtilization_t(PrintableStructure): - _fields_ = [ - ("gpu", c_uint), - ("memory", c_uint), - ] - _fmt_ = {"": "%d %%"} - - -# Added in 2.285 -class c_nvmlHwbcEntry_t(PrintableStructure): - _fields_ = [ - ("hwbcId", c_uint), - ("firmwareVersion", c_char * 32), - ] - - -class c_nvmlValue_t(Union): - _fields_ = [ - ("dVal", c_double), - ("uiVal", c_uint), - ("ulVal", c_ulong), - ("ullVal", c_ulonglong), - ] - - -class c_nvmlSample_t(PrintableStructure): - _fields_ = [ - ("timeStamp", c_ulonglong), - ("sampleValue", c_nvmlValue_t), - ] - - -class c_nvmlViolationTime_t(PrintableStructure): - _fields_ = [ - ("referenceTime", c_ulonglong), - ("violationTime", c_ulonglong), - ] - - -## Event structures -class struct_c_nvmlEventSet_t(Structure): - pass # opaque handle - - -c_nvmlEventSet_t = POINTER(struct_c_nvmlEventSet_t) - -nvmlEventTypeSingleBitEccError = 0x0000000000000001 -nvmlEventTypeDoubleBitEccError = 0x0000000000000002 -nvmlEventTypePState = 0x0000000000000004 -nvmlEventTypeXidCriticalError = 0x0000000000000008 -nvmlEventTypeClock = 0x0000000000000010 -nvmlEventTypeNone = 0x0000000000000000 -nvmlEventTypeAll = ( - nvmlEventTypeNone - | nvmlEventTypeSingleBitEccError - | nvmlEventTypeDoubleBitEccError - | nvmlEventTypePState - | nvmlEventTypeClock - | nvmlEventTypeXidCriticalError -) - -## Clock Throttle Reasons defines -nvmlClocksThrottleReasonGpuIdle = 0x0000000000000001 -nvmlClocksThrottleReasonApplicationsClocksSetting = 0x0000000000000002 -nvmlClocksThrottleReasonUserDefinedClocks = nvmlClocksThrottleReasonApplicationsClocksSetting # deprecated, use nvmlClocksThrottleReasonApplicationsClocksSetting -nvmlClocksThrottleReasonSwPowerCap = 0x0000000000000004 -nvmlClocksThrottleReasonHwSlowdown = 0x0000000000000008 -nvmlClocksThrottleReasonUnknown = 0x8000000000000000 -nvmlClocksThrottleReasonNone = 0x0000000000000000 -nvmlClocksThrottleReasonAll = ( - nvmlClocksThrottleReasonNone - | nvmlClocksThrottleReasonGpuIdle - | nvmlClocksThrottleReasonApplicationsClocksSetting - | nvmlClocksThrottleReasonSwPowerCap - | nvmlClocksThrottleReasonHwSlowdown - | nvmlClocksThrottleReasonUnknown -) - - -class c_nvmlEventData_t(PrintableStructure): - _fields_ = [ - ("device", c_nvmlDevice_t), - ("eventType", c_ulonglong), - ("eventData", c_ulonglong), - ] - _fmt_ = {"eventType": "0x%08X"} - - -class c_nvmlAccountingStats_t(PrintableStructure): - _fields_ = [ - ("gpuUtilization", c_uint), - ("memoryUtilization", c_uint), - ("maxMemoryUsage", c_ulonglong), - ("time", c_ulonglong), - ("startTime", c_ulonglong), - ("isRunning", c_uint), - ("reserved", c_uint * 5), - ] - - -class c_nvmlPciInfo_t(PrintableStructure): - _fields_ = [ - ("bus", c_uint), - 
("busId", c_char * 16), - ("busIdLegacy", c_char * 16), - ("device", c_uint), - ("domain", c_uint), - ("pciDeviceId", c_uint), - ("pciSubSystemId", c_uint), - ] - _fmt_ = { - "domain": "0x%04X", - "bus": "0x%02X", - "device": "0x%02X", - "pciDeviceId": "0x%08X", - "pciSubSystemId": "0x%08X", - } - - -## ========================================================================== ## -## ## -## NVML Library Function Wrappers ## -## ## -## ========================================================================== ## - - -def nvmlInit(): - """Initialize NVML. - - Uses nvmlInit_v2() from the underlying NVML library. - - Args: - None - - Returns: - None - """ - - def _load_nvml_library(): - """ - Load the library if it isn't loaded already - """ - global nvml_lib - - if nvml_lib == None: - # lock to ensure only one caller loads the library - lib_load_lock.acquire() - - try: - # ensure the library still isn't loaded - if nvml_lib == None: - if sys.platform[:3] == "win": - # cdecl calling convention - # load nvml.dll from %ProgramFiles%/NVIDIA Corporation/NVSMI/nvml.dll or try other paths - search_paths = [ - os.path.join( - os.getenv("ProgramFiles", "C:/Program Files"), - "NVIDIA Corporation/NVSMI/nvml.dll", - ), - os.path.join("C:/Windows", "System32", "nvml.dll"), - ] - nvml_path = os.getenv("NVML_DLL_PATH") - if nvml_path: - search_paths.append(nvml_path) - for path in search_paths: - try: - nvml_lib = CDLL(path) - except OSError: - continue - break - else: - # assume linux - try: - nvml_lib = CDLL("libnvidia-ml.so.1") - except OSError: - check_return(NVML_ERROR_LIBRARY_NOT_FOUND) - if nvml_lib == None: - check_return(NVML_ERROR_LIBRARY_NOT_FOUND) - finally: - # lock is always freed - lib_load_lock.release() - - _load_nvml_library() - - # - # Initialize the library - # - fn = get_func_pointer("nvmlInit_v2") - ret = fn() - check_return(ret) - - # Atomically update refcount - global nvml_lib_refcount - lib_load_lock.acquire() - nvml_lib_refcount += 1 - lib_load_lock.release() - return None - - -def nvmlShutdown(): - """Shutdown NVML. - - Uses nvmlShutdown() from the underlying NVML library. 
- - Args: - None - - Returns: - None - """ - # - # Leave the library loaded, but shutdown the interface - # - fn = get_func_pointer("nvmlShutdown") - ret = fn() - check_return(ret) - - # Atomically update refcount - global nvml_lib_refcount - lib_load_lock.acquire() - if 0 < nvml_lib_refcount: - nvml_lib_refcount -= 1 - lib_load_lock.release() - return None - - -# Added in 2.285 -def nvml_error_string(result): - fn = get_func_pointer("nvmlErrorString") - fn.restype = c_char_p # otherwise return is an int - ret = fn(result) - return ret - - -# Added in 2.285 -def nvmlSystemGetNVMLVersion(): - c_version = create_string_buffer(NVML_SYSTEM_NVML_VERSION_BUFFER_SIZE) - fn = get_func_pointer("nvmlSystemGetNVMLVersion") - ret = fn(c_version, c_uint(NVML_SYSTEM_NVML_VERSION_BUFFER_SIZE)) - check_return(ret) - return c_version.value - - -# Added in 2.285 -def nvmlSystemGetProcessName(pid): - c_name = create_string_buffer(1024) - fn = get_func_pointer("nvmlSystemGetProcessName") - ret = fn(c_uint(pid), c_name, c_uint(1024)) - check_return(ret) - return c_name.value - - -def nvmlSystemGetDriverVersion(): - c_version = create_string_buffer(NVML_SYSTEM_DRIVER_VERSION_BUFFER_SIZE) - fn = get_func_pointer("nvmlSystemGetDriverVersion") - ret = fn(c_version, c_uint(NVML_SYSTEM_DRIVER_VERSION_BUFFER_SIZE)) - check_return(ret) - return c_version.value - - -# Added in 2.285 -def nvmlSystemGetHicVersion(): - c_count = c_uint(0) - hics = None - fn = get_func_pointer("nvmlSystemGetHicVersion") - - # get the count - ret = fn(byref(c_count), None) - - # this should only fail with insufficient size - if (ret != NVML_SUCCESS) and (ret != NVML_ERROR_INSUFFICIENT_SIZE): - raise NVMLError(ret) - - # if there are no hics - if c_count.value == 0: - return [] - - hic_array = c_nvmlHwbcEntry_t * c_count.value - hics = hic_array() - ret = fn(byref(c_count), hics) - check_return(ret) - return hics - - -## Unit get functions -def nvmlUnitGetCount(): - c_count = c_uint() - fn = get_func_pointer("nvmlUnitGetCount") - ret = fn(byref(c_count)) - check_return(ret) - return c_count.value - - -def nvmlUnitGetHandleByIndex(index): - c_index = c_uint(index) - unit = c_nvmlUnit_t() - fn = get_func_pointer("nvmlUnitGetHandleByIndex") - ret = fn(c_index, byref(unit)) - check_return(ret) - return unit - - -def nvmlUnitGetUnitInfo(unit): - c_info = c_nvmlUnitInfo_t() - fn = get_func_pointer("nvmlUnitGetUnitInfo") - ret = fn(unit, byref(c_info)) - check_return(ret) - return c_info - - -def nvmlUnitGetLedState(unit): - c_state = c_nvmlLedState_t() - fn = get_func_pointer("nvmlUnitGetLedState") - ret = fn(unit, byref(c_state)) - check_return(ret) - return c_state - - -def nvmlUnitGetPsuInfo(unit): - c_info = c_nvmlPSUInfo_t() - fn = get_func_pointer("nvmlUnitGetPsuInfo") - ret = fn(unit, byref(c_info)) - check_return(ret) - return c_info - - -def nvmlUnitGetTemperature(unit, type): - c_temp = c_uint() - fn = get_func_pointer("nvmlUnitGetTemperature") - ret = fn(unit, c_uint(type), byref(c_temp)) - check_return(ret) - return c_temp.value - - -def nvmlUnitGetFanSpeedInfo(unit): - c_speeds = c_nvmlUnitFanSpeeds_t() - fn = get_func_pointer("nvmlUnitGetFanSpeedInfo") - ret = fn(unit, byref(c_speeds)) - check_return(ret) - return c_speeds - - -# added to API -def nvmlUnitGetDeviceCount(unit): - c_count = c_uint(0) - # query the unit to determine device count - fn = get_func_pointer("nvmlUnitGetDevices") - ret = fn(unit, byref(c_count), None) - if ret == NVML_ERROR_INSUFFICIENT_SIZE: - ret = NVML_SUCCESS - check_return(ret) - return c_count.value - - 
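[Editor's note: the unit queries above compose in the obvious way; a minimal sketch (units exist only on S-class systems, so this assumes at least one unit is present and that nvmlInit() succeeds):

    nvmlInit()
    try:
        for i in range(nvmlUnitGetCount()):
            unit = nvmlUnitGetHandleByIndex(i)
            # PrintableStructure subclasses render as readable one-liners
            print(nvmlUnitGetUnitInfo(unit))
            print("attached devices:", nvmlUnitGetDeviceCount(unit))
    finally:
        nvmlShutdown()
]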
-def nvmlUnitGetDevices(unit): - c_count = c_uint(nvmlUnitGetDeviceCount(unit)) - device_array = c_nvmlDevice_t * c_count.value - c_devices = device_array() - fn = get_func_pointer("nvmlUnitGetDevices") - ret = fn(unit, byref(c_count), c_devices) - check_return(ret) - return c_devices - - -## Device get functions -def nvmlDeviceGetCount(): - c_count = c_uint() - fn = get_func_pointer("nvmlDeviceGetCount_v2") - ret = fn(byref(c_count)) - check_return(ret) - return c_count.value - - -def nvmlDeviceGetHandleByIndex(index): - c_index = c_uint(index) - device = c_nvmlDevice_t() - fn = get_func_pointer("nvmlDeviceGetHandleByIndex_v2") - ret = fn(c_index, byref(device)) - check_return(ret) - return device - - -def nvmlDeviceGetHandleBySerial(serial): - c_serial = c_char_p(serial) - device = c_nvmlDevice_t() - fn = get_func_pointer("nvmlDeviceGetHandleBySerial") - ret = fn(c_serial, byref(device)) - check_return(ret) - return device - - -def nvmlDeviceGetHandleByUUID(uuid): - c_uuid = c_char_p(uuid) - device = c_nvmlDevice_t() - fn = get_func_pointer("nvmlDeviceGetHandleByUUID") - ret = fn(c_uuid, byref(device)) - check_return(ret) - return device - - -def nvmlDeviceGetHandleByPciBusId(pciBusId): - c_busId = c_char_p(pciBusId) - device = c_nvmlDevice_t() - fn = get_func_pointer("nvmlDeviceGetHandleByPciBusId_v2") - ret = fn(c_busId, byref(device)) - check_return(ret) - return device - - -def nvmlDeviceGetName(handle): - c_name = create_string_buffer(NVML_DEVICE_NAME_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetName") - ret = fn(handle, c_name, c_uint(NVML_DEVICE_NAME_BUFFER_SIZE)) - check_return(ret) - return c_name.value - - -def nvmlDeviceGetBoardId(handle): - c_id = c_uint() - fn = get_func_pointer("nvmlDeviceGetBoardId") - ret = fn(handle, byref(c_id)) - check_return(ret) - return c_id.value - - -def nvmlDeviceGetMultiGpuBoard(handle): - c_multiGpu = c_uint() - fn = get_func_pointer("nvmlDeviceGetMultiGpuBoard") - ret = fn(handle, byref(c_multiGpu)) - check_return(ret) - return c_multiGpu.value - - -def nvmlDeviceGetBrand(handle): - c_type = _nvmlBrandType_t() - fn = get_func_pointer("nvmlDeviceGetBrand") - ret = fn(handle, byref(c_type)) - check_return(ret) - return c_type.value - - -def nvmlDeviceGetSerial(handle): - c_serial = create_string_buffer(NVML_DEVICE_SERIAL_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetSerial") - ret = fn(handle, c_serial, c_uint(NVML_DEVICE_SERIAL_BUFFER_SIZE)) - check_return(ret) - return c_serial.value - - -def nvmlDeviceGetCpuAffinity(handle, cpuSetSize): - affinity_array = c_ulonglong * cpuSetSize - c_affinity = affinity_array() - fn = get_func_pointer("nvmlDeviceGetCpuAffinity") - ret = fn(handle, cpuSetSize, byref(c_affinity)) - check_return(ret) - return c_affinity - - -def nvmlDeviceSetCpuAffinity(handle): - fn = get_func_pointer("nvmlDeviceSetCpuAffinity") - ret = fn(handle) - check_return(ret) - return None - - -def nvmlDeviceClearCpuAffinity(handle): - fn = get_func_pointer("nvmlDeviceClearCpuAffinity") - ret = fn(handle) - check_return(ret) - return None - - -def nvmlDeviceGetMinorNumber(handle): - c_minor_number = c_uint() - fn = get_func_pointer("nvmlDeviceGetMinorNumber") - ret = fn(handle, byref(c_minor_number)) - check_return(ret) - return c_minor_number.value - - -def nvmlDeviceGetUUID(handle): - c_uuid = create_string_buffer(NVML_DEVICE_UUID_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetUUID") - ret = fn(handle, c_uuid, c_uint(NVML_DEVICE_UUID_BUFFER_SIZE)) - check_return(ret) - return c_uuid.value - - -def 
nvmlDeviceGetInforomVersion(handle, infoRomObject): - c_version = create_string_buffer(NVML_DEVICE_INFOROM_VERSION_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetInforomVersion") - ret = fn( - handle, - _nvmlInforomObject_t(infoRomObject), - c_version, - c_uint(NVML_DEVICE_INFOROM_VERSION_BUFFER_SIZE), - ) - check_return(ret) - return c_version.value - - -# Added in 4.304 -def nvmlDeviceGetInforomImageVersion(handle): - c_version = create_string_buffer(NVML_DEVICE_INFOROM_VERSION_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetInforomImageVersion") - ret = fn(handle, c_version, c_uint(NVML_DEVICE_INFOROM_VERSION_BUFFER_SIZE)) - check_return(ret) - return c_version.value - - -# Added in 4.304 -def nvmlDeviceGetInforomConfigurationChecksum(handle): - c_checksum = c_uint() - fn = get_func_pointer("nvmlDeviceGetInforomConfigurationChecksum") - ret = fn(handle, byref(c_checksum)) - check_return(ret) - return c_checksum.value - - -# Added in 4.304 -def nvmlDeviceValidateInforom(handle): - fn = get_func_pointer("nvmlDeviceValidateInforom") - ret = fn(handle) - check_return(ret) - return None - - -def nvmlDeviceGetDisplayMode(handle): - c_mode = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetDisplayMode") - ret = fn(handle, byref(c_mode)) - check_return(ret) - return c_mode.value - - -def nvmlDeviceGetDisplayActive(handle): - c_mode = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetDisplayActive") - ret = fn(handle, byref(c_mode)) - check_return(ret) - return c_mode.value - - -def nvmlDeviceGetPersistenceMode(handle): - c_state = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetPersistenceMode") - ret = fn(handle, byref(c_state)) - check_return(ret) - return c_state.value - - -def nvmlDeviceGetPciInfo(handle): - c_info = nvmlPciInfo_t() - fn = get_func_pointer("nvmlDeviceGetPciInfo_v2") - ret = fn(handle, byref(c_info)) - check_return(ret) - return c_info - - -def nvmlDeviceGetClockInfo(handle, type): - c_clock = c_uint() - fn = get_func_pointer("nvmlDeviceGetClockInfo") - ret = fn(handle, _nvmlClockType_t(type), byref(c_clock)) - check_return(ret) - return c_clock.value - - -# Added in 2.285 -def nvmlDeviceGetMaxClockInfo(handle, type): - c_clock = c_uint() - fn = get_func_pointer("nvmlDeviceGetMaxClockInfo") - ret = fn(handle, _nvmlClockType_t(type), byref(c_clock)) - check_return(ret) - return c_clock.value - - -# Added in 4.304 -def nvmlDeviceGetApplicationsClock(handle, type): - c_clock = c_uint() - fn = get_func_pointer("nvmlDeviceGetApplicationsClock") - ret = fn(handle, _nvmlClockType_t(type), byref(c_clock)) - check_return(ret) - return c_clock.value - - -# Added in 5.319 -def nvmlDeviceGetDefaultApplicationsClock(handle, type): - c_clock = c_uint() - fn = get_func_pointer("nvmlDeviceGetDefaultApplicationsClock") - ret = fn(handle, _nvmlClockType_t(type), byref(c_clock)) - check_return(ret) - return c_clock.value - - -# Added in 4.304 -def nvmlDeviceGetSupportedMemoryClocks(handle): - # first call to get the size - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetSupportedMemoryClocks") - ret = fn(handle, byref(c_count), None) - - if ret == NVML_SUCCESS: - # special case, no clocks - return [] - elif ret == NVML_ERROR_INSUFFICIENT_SIZE: - # typical case - clocks_array = c_uint * c_count.value - c_clocks = clocks_array() - - # make the call again - ret = fn(handle, byref(c_count), c_clocks) - check_return(ret) - - procs = [] - for i in range(c_count.value): - procs.append(c_clocks[i]) - - return procs - else: - # error case - raise 
NVMLError(ret) - - -# Added in 4.304 -def nvmlDeviceGetSupportedGraphicsClocks(handle, memoryClockMHz): - # first call to get the size - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetSupportedGraphicsClocks") - ret = fn(handle, c_uint(memoryClockMHz), byref(c_count), None) - - if ret == NVML_SUCCESS: - # special case, no clocks - return [] - elif ret == NVML_ERROR_INSUFFICIENT_SIZE: - # typical case - clocks_array = c_uint * c_count.value - c_clocks = clocks_array() - - # make the call again - ret = fn(handle, c_uint(memoryClockMHz), byref(c_count), c_clocks) - check_return(ret) - - procs = [] - for i in range(c_count.value): - procs.append(c_clocks[i]) - - return procs - else: - # error case - raise NVMLError(ret) - - -def nvmlDeviceGetFanSpeed(handle): - c_speed = c_uint() - fn = get_func_pointer("nvmlDeviceGetFanSpeed") - ret = fn(handle, byref(c_speed)) - check_return(ret) - return c_speed.value - - -def nvmlDeviceGetTemperature(handle, sensor): - c_temp = c_uint() - fn = get_func_pointer("nvmlDeviceGetTemperature") - ret = fn(handle, _nvmlTemperatureSensors_t(sensor), byref(c_temp)) - check_return(ret) - return c_temp.value - - -def nvmlDeviceGetTemperatureThreshold(handle, threshold): - c_temp = c_uint() - fn = get_func_pointer("nvmlDeviceGetTemperatureThreshold") - ret = fn(handle, _nvmlTemperatureThresholds_t(threshold), byref(c_temp)) - check_return(ret) - return c_temp.value - - -# DEPRECATED use nvmlDeviceGetPerformanceState -def nvmlDeviceGetPowerState(handle): - c_pstate = _nvmlPstates_t() - fn = get_func_pointer("nvmlDeviceGetPowerState") - ret = fn(handle, byref(c_pstate)) - check_return(ret) - return c_pstate.value - - -def nvmlDeviceGetPerformanceState(handle): - c_pstate = _nvmlPstates_t() - fn = get_func_pointer("nvmlDeviceGetPerformanceState") - ret = fn(handle, byref(c_pstate)) - check_return(ret) - return c_pstate.value - - -def nvmlDeviceGetPowerManagementMode(handle): - c_pcapMode = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetPowerManagementMode") - ret = fn(handle, byref(c_pcapMode)) - check_return(ret) - return c_pcapMode.value - - -def nvmlDeviceGetPowerManagementLimit(handle): - c_limit = c_uint() - fn = get_func_pointer("nvmlDeviceGetPowerManagementLimit") - ret = fn(handle, byref(c_limit)) - check_return(ret) - return c_limit.value - - -# Added in 4.304 -def nvmlDeviceGetPowerManagementLimitConstraints(handle): - c_minLimit = c_uint() - c_maxLimit = c_uint() - fn = get_func_pointer("nvmlDeviceGetPowerManagementLimitConstraints") - ret = fn(handle, byref(c_minLimit), byref(c_maxLimit)) - check_return(ret) - return [c_minLimit.value, c_maxLimit.value] - - -# Added in 4.304 -def nvmlDeviceGetPowerManagementDefaultLimit(handle): - c_limit = c_uint() - fn = get_func_pointer("nvmlDeviceGetPowerManagementDefaultLimit") - ret = fn(handle, byref(c_limit)) - check_return(ret) - return c_limit.value - - -# Added in 331 -def nvmlDeviceGetEnforcedPowerLimit(handle): - c_limit = c_uint() - fn = get_func_pointer("nvmlDeviceGetEnforcedPowerLimit") - ret = fn(handle, byref(c_limit)) - check_return(ret) - return c_limit.value - - -def nvmlDeviceGetPowerUsage(handle): - c_mWatts = c_uint() - fn = get_func_pointer("nvmlDeviceGetPowerUsage") - ret = fn(handle, byref(c_mWatts)) - check_return(ret) - return c_mWatts.value - - -def nvmlDeviceGetTotalEnergyConsumption(handle): - c_mJoules = c_uint() - fn = get_func_pointer("nvmlDeviceGetTotalEnergyConsumption") - ret = fn(handle, byref(c_mJoules)) - check_return(ret) - return c_mJoules.value - - -# Added in 
4.304 -def nvmlDeviceGetGpuOperationMode(handle): - c_currState = _nvmlGpuOperationMode_t() - c_pendingState = _nvmlGpuOperationMode_t() - fn = get_func_pointer("nvmlDeviceGetGpuOperationMode") - ret = fn(handle, byref(c_currState), byref(c_pendingState)) - check_return(ret) - return [c_currState.value, c_pendingState.value] - - -# Added in 4.304 -def nvmlDeviceGetCurrentGpuOperationMode(handle): - return nvmlDeviceGetGpuOperationMode(handle)[0] - - -# Added in 4.304 -def nvmlDeviceGetPendingGpuOperationMode(handle): - return nvmlDeviceGetGpuOperationMode(handle)[1] - - -def nvmlDeviceGetMemoryInfo(handle): - """Retrieves memory object. - - Return object includes the amount of used, free and total memory available - on the device, in bytes. - - Args: - handle: The identifier of the target device - - Returns: - memory: The return value. An `nvmlMemory_t` object - - """ - c_memory = c_nvmlMemory_t() - fn = get_func_pointer("nvmlDeviceGetMemoryInfo") - ret = fn(handle, byref(c_memory)) - check_return(ret) - return c_memory - - -def nvmlDeviceGetBAR1MemoryInfo(handle): - c_bar1_memory = c_nvmlBAR1Memory_t() - fn = get_func_pointer("nvmlDeviceGetBAR1MemoryInfo") - ret = fn(handle, byref(c_bar1_memory)) - check_return(ret) - return c_bar1_memory - - -def nvmlDeviceGetComputeMode(handle): - c_mode = _nvmlComputeMode_t() - fn = get_func_pointer("nvmlDeviceGetComputeMode") - ret = fn(handle, byref(c_mode)) - check_return(ret) - return c_mode.value - - -def nvmlDeviceGetEccMode(handle): - c_currState = _nvmlEnableState_t() - c_pendingState = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetEccMode") - ret = fn(handle, byref(c_currState), byref(c_pendingState)) - check_return(ret) - return [c_currState.value, c_pendingState.value] - - -# added to API -def nvmlDeviceGetCurrentEccMode(handle): - return nvmlDeviceGetEccMode(handle)[0] - - -# added to API -def nvmlDeviceGetPendingEccMode(handle): - return nvmlDeviceGetEccMode(handle)[1] - - -def nvmlDeviceGetTotalEccErrors(handle, errorType, counterType): - c_count = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetTotalEccErrors") - ret = fn( - handle, - _nvmlMemoryErrorType_t(errorType), - _nvmlEccCounterType_t(counterType), - byref(c_count), - ) - check_return(ret) - return c_count.value - - -# This is deprecated, instead use nvmlDeviceGetMemoryErrorCounter -def nvmlDeviceGetDetailedEccErrors(handle, errorType, counterType): - c_counts = c_nvmlEccErrorCounts_t() - fn = get_func_pointer("nvmlDeviceGetDetailedEccErrors") - ret = fn( - handle, - _nvmlMemoryErrorType_t(errorType), - _nvmlEccCounterType_t(counterType), - byref(c_counts), - ) - check_return(ret) - return c_counts - - -# Added in 4.304 -def nvmlDeviceGetMemoryErrorCounter(handle, errorType, counterType, locationType): - c_count = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetMemoryErrorCounter") - ret = fn( - handle, - _nvmlMemoryErrorType_t(errorType), - _nvmlEccCounterType_t(counterType), - _nvmlMemoryLocation_t(locationType), - byref(c_count), - ) - check_return(ret) - return c_count.value - - -def nvmlDeviceGetUtilizationRates(handle): - c_util = c_nvmlUtilization_t() - fn = get_func_pointer("nvmlDeviceGetUtilizationRates") - ret = fn(handle, byref(c_util)) - check_return(ret) - return c_util - - -def nvmlDeviceGetEncoderUtilization(handle): - c_util = c_uint() - c_samplingPeriod = c_uint() - fn = get_func_pointer("nvmlDeviceGetEncoderUtilization") - ret = fn(handle, byref(c_util), byref(c_samplingPeriod)) - check_return(ret) - return [c_util.value, 
c_samplingPeriod.value] - - -def nvmlDeviceGetDecoderUtilization(handle): - c_util = c_uint() - c_samplingPeriod = c_uint() - fn = get_func_pointer("nvmlDeviceGetDecoderUtilization") - ret = fn(handle, byref(c_util), byref(c_samplingPeriod)) - check_return(ret) - return [c_util.value, c_samplingPeriod.value] - - -def nvmlDeviceGetPcieReplayCounter(handle): - c_replay = c_uint() - fn = get_func_pointer("nvmlDeviceGetPcieReplayCounter") - ret = fn(handle, byref(c_replay)) - check_return(ret) - return c_replay.value - - -def nvmlDeviceGetDriverModel(handle): - c_currModel = _nvmlDriverModel_t() - c_pendingModel = _nvmlDriverModel_t() - fn = get_func_pointer("nvmlDeviceGetDriverModel") - ret = fn(handle, byref(c_currModel), byref(c_pendingModel)) - check_return(ret) - return [c_currModel.value, c_pendingModel.value] - - -# added to API -def nvmlDeviceGetCurrentDriverModel(handle): - return nvmlDeviceGetDriverModel(handle)[0] - - -# added to API -def nvmlDeviceGetPendingDriverModel(handle): - return nvmlDeviceGetDriverModel(handle)[1] - - -# Added in 2.285 -def nvmlDeviceGetVbiosVersion(handle): - c_version = create_string_buffer(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE) - fn = get_func_pointer("nvmlDeviceGetVbiosVersion") - ret = fn(handle, c_version, c_uint(NVML_DEVICE_VBIOS_VERSION_BUFFER_SIZE)) - check_return(ret) - return c_version.value - - -# Added in 2.285 -def nvmlDeviceGetComputeRunningProcesses(handle): - # first call to get the size - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetComputeRunningProcesses") - ret = fn(handle, byref(c_count), None) - - if ret == NVML_SUCCESS: - # special case, no running processes - return [] - elif ret == NVML_ERROR_INSUFFICIENT_SIZE: - # typical case - # oversize the array incase more processes are created - c_count.value = c_count.value * 2 + 5 - proc_array = c_nvmlProcessInfo_t * c_count.value - c_procs = proc_array() - - # make the call again - ret = fn(handle, byref(c_count), c_procs) - check_return(ret) - - procs = [] - for i in range(c_count.value): - # use an alternative struct for this object - obj = struct_to_friendly_object(c_procs[i]) - if obj.usedGpuMemory == NVML_VALUE_NOT_AVAILABLE_ulonglong.value: - # special case for WDDM on Windows, see comment above - obj.usedGpuMemory = None - procs.append(obj) - - return procs - else: - # error case - raise NVMLError(ret) - - -def nvmlDeviceGetGraphicsRunningProcesses(handle): - # first call to get the size - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetGraphicsRunningProcesses") - ret = fn(handle, byref(c_count), None) - - if ret == NVML_SUCCESS: - # special case, no running processes - return [] - elif ret == NVML_ERROR_INSUFFICIENT_SIZE: - # typical case - # oversize the array incase more processes are created - c_count.value = c_count.value * 2 + 5 - proc_array = c_nvmlProcessInfo_t * c_count.value - c_procs = proc_array() - - # make the call again - ret = fn(handle, byref(c_count), c_procs) - check_return(ret) - - procs = [] - for i in range(c_count.value): - # use an alternative struct for this object - obj = struct_to_friendly_object(c_procs[i]) - if obj.usedGpuMemory == NVML_VALUE_NOT_AVAILABLE_ulonglong.value: - # special case for WDDM on Windows, see comment above - obj.usedGpuMemory = None - procs.append(obj) - - return procs - else: - # error case - raise NVMLError(ret) - - -def nvmlDeviceGetAutoBoostedClocksEnabled(handle): - c_isEnabled = _nvmlEnableState_t() - c_defaultIsEnabled = _nvmlEnableState_t() - fn = 
get_func_pointer("nvmlDeviceGetAutoBoostedClocksEnabled") - ret = fn(handle, byref(c_isEnabled), byref(c_defaultIsEnabled)) - check_return(ret) - return [c_isEnabled.value, c_defaultIsEnabled.value] - # Throws NVML_ERROR_NOT_SUPPORTED if hardware doesn't support setting auto boosted clocks - - -## Set functions -def nvmlUnitSetLedState(unit, color): - fn = get_func_pointer("nvmlUnitSetLedState") - ret = fn(unit, _nvmlLedColor_t(color)) - check_return(ret) - return None - - -def nvmlDeviceSetPersistenceMode(handle, mode): - fn = get_func_pointer("nvmlDeviceSetPersistenceMode") - ret = fn(handle, _nvmlEnableState_t(mode)) - check_return(ret) - return None - - -def nvmlDeviceSetComputeMode(handle, mode): - fn = get_func_pointer("nvmlDeviceSetComputeMode") - ret = fn(handle, _nvmlComputeMode_t(mode)) - check_return(ret) - return None - - -def nvmlDeviceSetEccMode(handle, mode): - fn = get_func_pointer("nvmlDeviceSetEccMode") - ret = fn(handle, _nvmlEnableState_t(mode)) - check_return(ret) - return None - - -def nvmlDeviceClearEccErrorCounts(handle, counterType): - fn = get_func_pointer("nvmlDeviceClearEccErrorCounts") - ret = fn(handle, _nvmlEccCounterType_t(counterType)) - check_return(ret) - return None - - -def nvmlDeviceSetDriverModel(handle, model): - fn = get_func_pointer("nvmlDeviceSetDriverModel") - ret = fn(handle, _nvmlDriverModel_t(model)) - check_return(ret) - return None - - -def nvmlDeviceSetAutoBoostedClocksEnabled(handle, enabled): - fn = get_func_pointer("nvmlDeviceSetAutoBoostedClocksEnabled") - ret = fn(handle, _nvmlEnableState_t(enabled)) - check_return(ret) - return None - # Throws NVML_ERROR_NOT_SUPPORTED if hardware doesn't support setting auto boosted clocks - - -def nvmlDeviceSetDefaultAutoBoostedClocksEnabled(handle, enabled, flags): - fn = get_func_pointer("nvmlDeviceSetDefaultAutoBoostedClocksEnabled") - ret = fn(handle, _nvmlEnableState_t(enabled), c_uint(flags)) - check_return(ret) - return None - # Throws NVML_ERROR_NOT_SUPPORTED if hardware doesn't support setting auto boosted clocks - - -# Added in 4.304 -def nvmlDeviceSetApplicationsClocks(handle, maxMemClockMHz, maxGraphicsClockMHz): - fn = get_func_pointer("nvmlDeviceSetApplicationsClocks") - ret = fn(handle, c_uint(maxMemClockMHz), c_uint(maxGraphicsClockMHz)) - check_return(ret) - return None - - -# Added in 4.304 -def nvmlDeviceResetApplicationsClocks(handle): - fn = get_func_pointer("nvmlDeviceResetApplicationsClocks") - ret = fn(handle) - check_return(ret) - return None - - -# Added in 4.304 -def nvmlDeviceSetPowerManagementLimit(handle, limit): - fn = get_func_pointer("nvmlDeviceSetPowerManagementLimit") - ret = fn(handle, c_uint(limit)) - check_return(ret) - return None - - -# Added in 4.304 -def nvmlDeviceSetGpuOperationMode(handle, mode): - fn = get_func_pointer("nvmlDeviceSetGpuOperationMode") - ret = fn(handle, _nvmlGpuOperationMode_t(mode)) - check_return(ret) - return None - - -# Added in 2.285 -def nvmlEventSetCreate(): - fn = get_func_pointer("nvmlEventSetCreate") - eventSet = c_nvmlEventSet_t() - ret = fn(byref(eventSet)) - check_return(ret) - return eventSet - - -# Added in 2.285 -def nvmlDeviceRegisterEvents(handle, eventTypes, eventSet): - fn = get_func_pointer("nvmlDeviceRegisterEvents") - ret = fn(handle, c_ulonglong(eventTypes), eventSet) - check_return(ret) - return None - - -# Added in 2.285 -def nvmlDeviceGetSupportedEventTypes(handle): - c_eventTypes = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetSupportedEventTypes") - ret = fn(handle, byref(c_eventTypes)) - 
check_return(ret) - return c_eventTypes.value - - -# Added in 2.285 -# raises NVML_ERROR_TIMEOUT exception on timeout -def nvmlEventSetWait(eventSet, timeoutms): - fn = get_func_pointer("nvmlEventSetWait") - data = c_nvmlEventData_t() - ret = fn(eventSet, byref(data), c_uint(timeoutms)) - check_return(ret) - return data - - -# Added in 2.285 -def nvmlEventSetFree(eventSet): - fn = get_func_pointer("nvmlEventSetFree") - ret = fn(eventSet) - check_return(ret) - return None - - -# Added in 3.295 -def nvmlDeviceOnSameBoard(handle1, handle2): - fn = get_func_pointer("nvmlDeviceOnSameBoard") - onSameBoard = c_int() - ret = fn(handle1, handle2, byref(onSameBoard)) - check_return(ret) - return onSameBoard.value != 0 - - -# Added in 3.295 -def nvmlDeviceGetCurrPcieLinkGeneration(handle): - fn = get_func_pointer("nvmlDeviceGetCurrPcieLinkGeneration") - gen = c_uint() - ret = fn(handle, byref(gen)) - check_return(ret) - return gen.value - - -# Added in 3.295 -def nvmlDeviceGetMaxPcieLinkGeneration(handle): - fn = get_func_pointer("nvmlDeviceGetMaxPcieLinkGeneration") - gen = c_uint() - ret = fn(handle, byref(gen)) - check_return(ret) - return gen.value - - -# Added in 3.295 -def nvmlDeviceGetCurrPcieLinkWidth(handle): - fn = get_func_pointer("nvmlDeviceGetCurrPcieLinkWidth") - width = c_uint() - ret = fn(handle, byref(width)) - check_return(ret) - return width.value - - -# Added in 3.295 -def nvmlDeviceGetMaxPcieLinkWidth(handle): - fn = get_func_pointer("nvmlDeviceGetMaxPcieLinkWidth") - width = c_uint() - ret = fn(handle, byref(width)) - check_return(ret) - return width.value - - -# Added in 4.304 -def nvmlDeviceGetSupportedClocksThrottleReasons(handle): - c_reasons = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetSupportedClocksThrottleReasons") - ret = fn(handle, byref(c_reasons)) - check_return(ret) - return c_reasons.value - - -# Added in 4.304 -def nvmlDeviceGetCurrentClocksThrottleReasons(handle): - c_reasons = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetCurrentClocksThrottleReasons") - ret = fn(handle, byref(c_reasons)) - check_return(ret) - return c_reasons.value - - -# Added in 5.319 -def nvmlDeviceGetIndex(handle): - fn = get_func_pointer("nvmlDeviceGetIndex") - c_index = c_uint() - ret = fn(handle, byref(c_index)) - check_return(ret) - return c_index.value - - -# Added in 5.319 -def nvmlDeviceGetAccountingMode(handle): - c_mode = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetAccountingMode") - ret = fn(handle, byref(c_mode)) - check_return(ret) - return c_mode.value - - -def nvmlDeviceSetAccountingMode(handle, mode): - fn = get_func_pointer("nvmlDeviceSetAccountingMode") - ret = fn(handle, _nvmlEnableState_t(mode)) - check_return(ret) - return None - - -def nvmlDeviceClearAccountingPids(handle): - fn = get_func_pointer("nvmlDeviceClearAccountingPids") - ret = fn(handle) - check_return(ret) - return None - - -def nvmlDeviceGetAccountingStats(handle, pid): - stats = c_nvmlAccountingStats_t() - fn = get_func_pointer("nvmlDeviceGetAccountingStats") - ret = fn(handle, c_uint(pid), byref(stats)) - check_return(ret) - if stats.maxMemoryUsage == NVML_VALUE_NOT_AVAILABLE_ulonglong.value: - # special case for WDDM on Windows, see comment above - stats.maxMemoryUsage = None - return stats - - -def nvmlDeviceGetAccountingPids(handle): - count = c_uint(nvmlDeviceGetAccountingBufferSize(handle)) - pids = (c_uint * count.value)() - fn = get_func_pointer("nvmlDeviceGetAccountingPids") - ret = fn(handle, byref(count), pids) - check_return(ret) - return map(int, pids[0 : 
count.value]) - - -def nvmlDeviceGetAccountingBufferSize(handle): - bufferSize = c_uint() - fn = get_func_pointer("nvmlDeviceGetAccountingBufferSize") - ret = fn(handle, byref(bufferSize)) - check_return(ret) - return int(bufferSize.value) - - -def nvmlDeviceGetRetiredPages(device, sourceFilter): - c_source = _nvmlPageRetirementCause_t(sourceFilter) - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetRetiredPages") - - # First call will get the size - ret = fn(device, c_source, byref(c_count), None) - - # this should only fail with insufficient size - if (ret != NVML_SUCCESS) and (ret != NVML_ERROR_INSUFFICIENT_SIZE): - raise NVMLError(ret) - - # call again with a buffer - # oversize the array for the rare cases where additional pages - # are retired between NVML calls - c_count.value = c_count.value * 2 + 5 - page_array = c_ulonglong * c_count.value - c_pages = page_array() - ret = fn(device, c_source, byref(c_count), c_pages) - check_return(ret) - return map(int, c_pages[0 : c_count.value]) - - -def nvmlDeviceGetRetiredPagesPendingStatus(device): - c_pending = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetRetiredPagesPendingStatus") - ret = fn(device, byref(c_pending)) - check_return(ret) - return int(c_pending.value) - - -def nvmlDeviceGetAPIRestriction(device, apiType): - c_permission = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetAPIRestriction") - ret = fn(device, _nvmlRestrictedAPI_t(apiType), byref(c_permission)) - check_return(ret) - return int(c_permission.value) - - -def nvmlDeviceSetAPIRestriction(handle, apiType, isRestricted): - fn = get_func_pointer("nvmlDeviceSetAPIRestriction") - ret = fn(handle, _nvmlRestrictedAPI_t(apiType), _nvmlEnableState_t(isRestricted)) - check_return(ret) - return None - - -def nvmlDeviceGetBridgeChipInfo(handle): - bridgeHierarchy = c_nvmlBridgeChipHierarchy_t() - fn = get_func_pointer("nvmlDeviceGetBridgeChipInfo") - ret = fn(handle, byref(bridgeHierarchy)) - check_return(ret) - return bridgeHierarchy - - -def nvmlDeviceGetSamples(device, sampling_type, timeStamp): - c_sampling_type = _nvmlSamplingType_t(sampling_type) - c_time_stamp = c_ulonglong(timeStamp) - c_sample_count = c_uint(0) - c_sample_value_type = _nvmlValueType_t() - fn = get_func_pointer("nvmlDeviceGetSamples") - - ## First Call gets the size - ret = fn( - device, - c_sampling_type, - c_time_stamp, - byref(c_sample_value_type), - byref(c_sample_count), - None, - ) - - # Stop if this fails - if ret != NVML_SUCCESS: - raise NVMLError(ret) - - sampleArray = c_sample_count.value * c_nvmlSample_t - c_samples = sampleArray() - ret = fn( - device, - c_sampling_type, - c_time_stamp, - byref(c_sample_value_type), - byref(c_sample_count), - c_samples, - ) - check_return(ret) - return (c_sample_value_type.value, c_samples[0 : c_sample_count.value]) - - -def nvmlDeviceGetViolationStatus(device, perfPolicyType): - c_perfPolicy_type = _nvmlPerfPolicyType_t(perfPolicyType) - c_violTime = c_nvmlViolationTime_t() - fn = get_func_pointer("nvmlDeviceGetViolationStatus") - - ## Invoke the method to get violation time - ret = fn(device, c_perfPolicy_type, byref(c_violTime)) - check_return(ret) - return c_violTime - - -def nvmlDeviceGetPcieThroughput(device, counter): - c_util = c_uint() - fn = get_func_pointer("nvmlDeviceGetPcieThroughput") - ret = fn(device, _nvmlPcieUtilCounter_t(counter), byref(c_util)) - check_return(ret) - return c_util.value - - -def nvmlSystemGetTopologyGpuSet(cpuNumber): - c_count = c_uint(0) - fn = 
get_func_pointer("nvmlSystemGetTopologyGpuSet") - - # First call will get the size - ret = fn(cpuNumber, byref(c_count), None) - - if ret != NVML_SUCCESS: - raise NVMLError(ret) - print(c_count.value) - # call again with a buffer - device_array = c_nvmlDevice_t * c_count.value - c_devices = device_array() - ret = fn(cpuNumber, byref(c_count), c_devices) - check_return(ret) - return map(None, c_devices[0 : c_count.value]) - - -def nvmlDeviceGetTopologyNearestGpus(device, level): - c_count = c_uint(0) - fn = get_func_pointer("nvmlDeviceGetTopologyNearestGpus") - - # First call will get the size - ret = fn(device, level, byref(c_count), None) - - if ret != NVML_SUCCESS: - raise NVMLError(ret) - - # call again with a buffer - device_array = c_nvmlDevice_t * c_count.value - c_devices = device_array() - ret = fn(device, level, byref(c_count), c_devices) - check_return(ret) - return map(None, c_devices[0 : c_count.value]) - - -def nvmlDeviceGetTopologyCommonAncestor(device1, device2): - c_level = _nvmlGpuTopologyLevel_t() - fn = get_func_pointer("nvmlDeviceGetTopologyCommonAncestor") - ret = fn(device1, device2, byref(c_level)) - check_return(ret) - return c_level.value - - -def nvmlDeviceFreezeNvLinkUtilizationCounter(device, link, counter, freeze): - """Freeze the NVLINK utilization counters. - - Freeze the NVLINK utilization counters. Both the receive and transmit - counters are operated on by this function. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - freeze: NVML_FEATURE_ENABLED(1) = freeze the rx and tx counters - NVML_FEATURE_DISABLED(0) = unfreeze the rx and tx counters - - """ - c_link = c_uint(link) - c_counter = c_uint(counter) - fn = get_func_pointer("nvmlDeviceFreezeNvLinkUtilizationCounter") - ret = fn(device, c_link, c_counter, _nvmlEnableState_t(freeze)) - return check_return(ret) - - -def nvmlDeviceGetNvLinkCapability(device, link, cap): - """Retrieve the capability of a specified NvLinklink. - - Retrieves the requested capability from the device's NvLink for the link - specified. Please refer to the nvmlNvLinkCapability_t structure for the - specific caps that can be queried. The return value should be treated as a - boolean. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - capability: Specifies the nvmlNvLinkCapability_t to be queried - - Returns: - cap_result: A boolean for the queried capability indicating that feature - is available - - """ - c_link = c_uint(link) - c_cap_result = c_bool() - fn = get_func_pointer("nvmlDeviceGetNvLinkCapability") - ret = fn(device, c_link, _nvmlNvLinkCapability_t(cap), byref(c_cap_result)) - check_return(ret) - return c_cap_result.value - - -def nvmlDeviceGetNvLinkErrorCounter(device, link, counter): - """Retrieve the specified error counter value. - - Retrieves the specified error counter value. Please refer to - _nvmlNvLinkErrorCounter_t for error counters that are available. 
- - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - counter: Specifies the NvLink counter to be queried - - Returns: - value: The specified counter value - - """ - c_link = c_uint(link) - c_cap_result = c_bool() - c_value = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetNvLinkErrorCounter") - ret = fn(device, c_link, _nvmlNvLinkErrorCounter_t(counter), byref(c_value)) - check_return(ret) - return c_value.value - - -def nvmlDeviceGetNvLinkRemotePciInfo(device, link): - """Retrieve the PCI information for the remote node on a NvLink link. - - Retrieves the PCI information for the remote node on a NvLink link. Note: - pciSubSystemId is not filled in this function and is indeterminate. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - Returns: - pci: nvmlPciInfo_t of the remote node for the specified link - - """ - c_link = c_uint(link) - c_pci = c_nvmlPciInfo_t() - fn = get_func_pointer("nvmlDeviceGetNvLinkRemotePciInfo") - ret = fn(device, c_link, byref(c_pci)) - check_return(ret) - return c_pci - - -def nvmlDeviceGetNvLinkState(device, link): - """Retrieve the state of the device's NvLink for the link specified. - - Retrieves the state of the device's NvLink for the link specified. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - Returns: - mode: nvmlEnableState_t where NVML_FEATURE_ENABLED indicates that the - link is active and NVML_FEATURE_DISABLED indicates it is inactive. - - """ - c_link = c_uint(link) - c_mode = _nvmlEnableState_t() - fn = get_func_pointer("nvmlDeviceGetNvLinkState") - ret = fn(device, c_link, byref(c_mode)) - check_return(ret) - return c_mode.value - - -def nvmlDeviceGetNvLinkUtilizationControl(device, link, counter): - """Get NVLINK utilization counter control information - - Get the NVLINK utilization counter control information for the specified - counter, 0 or 1. Please refer to nvmlNvLinkUtilizationControl_t for the - structure definition. [Note: nvmlNvLinkUtilizationControl_t not documented] - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - counter: Specifies the counter that should be set (0 or 1) - - Returns: - control: The nvmlNvLinkUtilizationControl_t object (an integer) - - """ - c_link = c_uint(link) - c_counter = c_uint(counter) - c_control = _nvmlNvLinkUtilizationControl_t() - fn = get_func_pointer("nvmlDeviceGetNvLinkUtilizationControl") - ret = fn(device, c_link, c_counter, byref(c_control)) - check_return(ret) - return c_control.value - - -def nvmlDeviceGetNvLinkUtilizationCounter(device, link, counter): - """Retrieve an NVLINK utilization counter. - - Retrieve the NVLINK utilization counter based on the current control for a - specified counter. In general it is good practice to use - nvmlDeviceSetNvLinkUtilizationControl before reading the utilization - counters as they have no default state. 
- - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - counter: Specifies the counter that should be set (0 or 1) - - Returns: - rxtx_dict: Dictionary with `rx` key (value is "receive" counter) and - `tx` key (value is "transmit" counter) - - """ - c_link = c_uint(link) - c_counter = c_uint(counter) - c_rx = c_ulonglong() - c_tx = c_ulonglong() - fn = get_func_pointer("nvmlDeviceGetNvLinkUtilizationCounter") - ret = fn(device, c_link, c_counter, byref(c_rx), byref(c_tx)) - check_return(ret) - rxtx_dict = {"rx": c_rx.value, "tx": c_tx.value} - return rxtx_dict - - -def nvmlDeviceGetNvLinkVersion(device, link): - """Retrieve NvLink version. - - Retrieves the version of the device's NvLink for the link specified. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - Returns: - version: Requested NvLink version (uint) - - """ - c_link = c_uint(link) - c_version = c_uint() - fn = get_func_pointer("nvmlDeviceGetNvLinkVersion") - ret = fn(device, c_link, byref(c_version)) - check_return(ret) - return c_version.value - - -def nvmlDeviceResetNvLinkErrorCounters(device, link): - """Reset all error counters to zero. - - Resets all error counters to zero. Please refer to nvmlNvLinkErrorCounter_t - for the list of error counters that are reset. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - """ - c_link = c_uint(link) - fn = get_func_pointer("nvmlDeviceResetNvLinkErrorCounters") - ret = fn(device, c_link) - return check_return(ret) - - -def nvmlDeviceResetNvLinkUtilizationCounter(device, link, counter): - """Reset the NVLINK utilization counters. - - Reset the NVLINK utilization counters. Both the receive and transmit - counters are operated on by this function. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - counter: Specifies the counter that should be reset (0 or 1) - - """ - c_link = c_uint(link) - c_counter = c_uint(counter) - fn = get_func_pointer("nvmlDeviceResetNvLinkUtilizationCounter") - ret = fn(device, c_link, c_counter) - return check_return(ret) - - -def nvmlDeviceSetNvLinkUtilizationControl(device, link, counter, control, reset): - """Set the NVLINK utilization counter control. - - Set the NVLINK utilization counter control information for the specified - counter, 0 or 1. Please refer to nvmlNvLinkUtilizationControl_t for the - structure definition. Performs a reset of the counters if the reset - parameter is non-zero. Note: nvmlNvLinkUtilizationControl_t is an integer. - - Args: - device: The identifier of the target device - - link: Specifies the NvLink link to be queried (uint) - - counter: Specifies the counter that should be set (0 or 1) - - reset: Resets the counters on set if non-zero (uint) - - control: The nvmlNvLinkUtilizationControl_t control setting - Note: 0 == cycles, 1 == packets, 2 == bytes - - """ - c_link = c_uint(link) - c_counter = c_uint(counter) - c_control = _nvmlNvLinkUtilizationControl_t(control) - c_reset = c_uint(reset) - fn = get_func_pointer("nvmlDeviceSetNvLinkUtilizationControl") - ret = fn(device, c_link, c_counter, byref(c_control), c_reset) - return check_return(ret) diff --git a/src/neptune/version.py b/src/neptune/version.py deleted file mode 100644 index 0e9c6138f..000000000 --- a/src/neptune/version.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. 
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-__all__ = ["version", "__version__"]
-
-import sys
-from typing import Optional
-
-from packaging.version import parse
-
-if sys.version_info >= (3, 8):
-    from importlib.metadata import PackageNotFoundError
-    from importlib.metadata import version as version_parser
-else:
-    from importlib_metadata import PackageNotFoundError
-    from importlib_metadata import version as version_parser
-
-
-def check_version(package_name: str) -> Optional[str]:
-    try:
-        return version_parser(package_name)
-    except PackageNotFoundError:
-        # package is not installed
-        return None
-
-
-def detect_version() -> str:
-    neptune_version = check_version("neptune")
-
-    if neptune_version is not None:
-        return neptune_version
-
-    raise PackageNotFoundError("neptune")
-
-
-__version__ = detect_version()
-version = parse(__version__)
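[Editor's note: the removed check_version helper is a thin guard around importlib.metadata; the same pattern in isolation (package name here is purely illustrative):

    from importlib.metadata import PackageNotFoundError, version

    try:
        installed = version("some-package")  # e.g. "1.2.3" if installed
    except PackageNotFoundError:
        installed = None  # distribution is not installed in this environment
]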
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/e2e/base.py b/tests/e2e/base.py
deleted file mode 100644
index 432c19968..000000000
--- a/tests/e2e/base.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-__all__ = [
-    "BaseE2ETest",
-    "AVAILABLE_CONTAINERS",
-    "fake",
-    "Parameters",
-    "available_containers_parameters",
-    "make_parameters",
-]
-
-import inspect
-from typing import (
-    Any,
-    Callable,
-    List,
-    Optional,
-    Type,
-    Union,
-)
-
-import pytest
-from faker import Faker
-
-ParameterSet = Any
-
-fake = Faker()
-
-AVAILABLE_CONTAINERS = [
-    pytest.param("run"),
-    pytest.param("project", marks=pytest.mark.skip(reason="Project not supported")),
-    pytest.param(
-        "model",
-        marks=pytest.mark.skip(reason="Model not implemented"),
-    ),
-    pytest.param(
-        "model_version",
-        marks=pytest.mark.skip(reason="Model not implemented"),
-    ),
-]
-
-
-class Parameters:
-    def __init__(self, params: List[ParameterSet]) -> None:
-        self.param_d = {p.values[0]: p for p in params}
-
-    def _modify(self, *args: str, func: Callable[[str], ParameterSet]) -> "Parameters":
-        for arg in args:
-            self.param_d[arg] = func(arg)
-        return self
-
-    def skip(self, *args: str, reason: Optional[str] = None) -> "Parameters":
-        return self._modify(*args, func=lambda x: pytest.param(x, marks=pytest.mark.skip(reason=reason)))
-
-    def xfail(
-        self, *args: str, reason: Optional[str] = None, strict: bool = True, raises: Optional[Type[Exception]] = None
-    ) -> "Parameters":
-        return self._modify(
-            *args, func=lambda x: pytest.param(x, marks=pytest.mark.xfail(reason=reason, strict=strict, raises=raises))
-        )
-
-    def run(self, *args: str) -> "Parameters":
-        return self._modify(*args, func=lambda x: pytest.param(x))
-
-    def eval(self) -> List[ParameterSet]:
-        return list(self.param_d.values())
-
-
-def available_containers_parameters() -> Parameters:
-    return Parameters(AVAILABLE_CONTAINERS.copy())
-
-
-def make_parameters(params: List[Union[str, ParameterSet]]) -> Parameters:
-    return Parameters([pytest.param(p) if isinstance(p, str) else p for p in params])
-
-
-class BaseE2ETest:
-    def gen_key(self):
-        # Get the name of the calling test
-        caller_name = inspect.stack()[1][3]
-        return f"{self.__class__.__name__}/{caller_name}/{fake.unique.slug()}"
diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py
deleted file mode 100644
index 4142f23f7..000000000
--- a/tests/e2e/conftest.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import os -import time -import uuid -from datetime import datetime - -import pytest -from faker import Faker -from git import Repo - -from neptune import init_project -from neptune.internal.utils.s3 import get_boto_s3_client -from neptune.management import ( - add_project_member, - add_project_service_account, - create_project, -) -from neptune.management.internal.utils import normalize_project_name -from tests.e2e.utils import ( - Environment, - RawEnvironment, - a_project_name, - initialize_container, -) - -fake = Faker() - - -@pytest.fixture(scope="session") -def environment(): - raw_env = RawEnvironment() - workspace = raw_env.workspace_name - admin_token = raw_env.admin_neptune_api_token - user = raw_env.user_username - service_account_name = raw_env.service_account_name - - project_name = a_project_name(project_slug=fake.slug()) - project_identifier = normalize_project_name(name=project_name, workspace=workspace) - - created_project_identifier = create_project( - name=project_name, - visibility="priv", - workspace=workspace, - api_token=admin_token, - ) - - time.sleep(10) - - add_project_member( - project=created_project_identifier, - username=user, - role="contributor", - api_token=admin_token, - ) - - add_project_service_account( - project=created_project_identifier, - service_account_name=service_account_name, - role="contributor", - api_token=admin_token, - ) - - yield Environment( - workspace=workspace, - project=project_identifier, - user_token=raw_env.neptune_api_token, - admin_token=admin_token, - admin=raw_env.admin_username, - user=user, - service_account=raw_env.service_account_name, - ) - - project = init_project(project=created_project_identifier, api_token=admin_token) - project["finished"] = datetime.now() - project.stop() - - -@pytest.fixture(scope="session") -def container(request, environment): - exp = initialize_container(container_type=request.param, project=environment.project) - yield exp - exp.stop() - - -@pytest.fixture(scope="function") -def container_fn_scope(request, environment): - exp = initialize_container(container_type=request.param, project=environment.project) - yield exp - exp.stop() - - -@pytest.fixture(scope="session") -def containers_pair(request, environment): - container_a_type, container_b_type = request.param - container_a = initialize_container(container_type=container_a_type, project=environment.project) - container_b = initialize_container(container_type=container_b_type, project=environment.project) - - yield container_a, container_b - - container_b.stop() - container_a.stop() - - -@pytest.fixture(scope="session") -def bucket(): - bucket_name = os.environ.get("BUCKET_NAME") - - s3_client = get_boto_s3_client() - - yield bucket_name, s3_client - - -@pytest.fixture() -def common_tag(): - yield fake.nic_handle() - - -@pytest.fixture(scope="session") -def project(environment): - yield init_project(mode="read-only", project=environment.project, api_token=environment.user_token) - - -@pytest.fixture(scope="session") -def repo(tmp_path_factory) -> Repo: - path = tmp_path_factory.mktemp("git_repo") - repo = Repo.init(path) - file = path / f"{uuid.uuid4()}.txt" - with open(file, "w") as fp: - fp.write(str(uuid.uuid4())) - - repo.git.add(all=True) - repo.index.commit("Init commit") - return repo diff --git a/tests/e2e/exceptions.py b/tests/e2e/exceptions.py deleted file mode 100644 index 0654b0217..000000000 --- a/tests/e2e/exceptions.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from neptune.internal.exceptions import NeptuneException - - -class ConfigurationException(NeptuneException): - pass - - -class MissingEnvironmentVariable(ConfigurationException): - def __init__(self, missing_variable): - msg = f"Missing '{missing_variable}' in env configuration" - super().__init__(msg) diff --git a/tests/e2e/integrations/__init__.py b/tests/e2e/integrations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/e2e/integrations/test_huggingface.py b/tests/e2e/integrations/test_huggingface.py deleted file mode 100644 index db85c1918..000000000 --- a/tests/e2e/integrations/test_huggingface.py +++ /dev/null @@ -1,585 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
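MissingEnvironmentVariable is presumably raised by the RawEnvironment helper in tests/e2e/utils.py (not shown in this patch) when the test configuration is incomplete. A sketch of the intended pattern, with a hypothetical read_env helper:

import os

from tests.e2e.exceptions import MissingEnvironmentVariable


def read_env(name: str) -> str:
    try:
        return os.environ[name]
    except KeyError:
        raise MissingEnvironmentVariable(name) from None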
-# -import os -import time -from zipfile import ZipFile - -import numpy as np -import pytest -from faker import Faker - -from neptune import init_run -from neptune.table import TableEntry -from tests.e2e.base import BaseE2ETest -from tests.e2e.utils import ( - catch_time, - modified_environ, - tmp_context, -) - -torch = pytest.importorskip("torch") -transformers = pytest.importorskip("transformers") -transformers.integrations = pytest.importorskip("transformers.integrations") -transformers.utils = pytest.importorskip("transformers.utils") - -MAX_OVERWHELMING_FACTOR = 1.2 -SECONDS_TO_WAIT_FOR_UPDATE = 15 - - -transformers.utils.logging.set_verbosity_error() -fake = Faker() - - -class RegressionModelConfig(transformers.PretrainedConfig): - def __init__(self, a=2, b=3, **kwargs): - super().__init__(**kwargs) - self.a = a - self.b = b - - -class RegressionPreTrainedModel(transformers.PreTrainedModel): - config_class = RegressionModelConfig - base_model_prefix = "regression" - - def __init__(self, config): - super().__init__(config) - self.a = torch.nn.Parameter(torch.tensor(config.a).float()) - self.b = torch.nn.Parameter(torch.tensor(config.b).float()) - - def forward(self, input_x, labels=None): - y = input_x * self.a + self.b - if labels is None: - return (y,) - loss = torch.nn.functional.mse_loss(y, labels) - return loss, y - - -class RegressionDataset: - def __init__(self, a=2, b=3, length=64, seed=2501): - np.random.seed(seed) - - self.label_names = ["labels"] - self.length = length - self.x = np.random.normal(size=(length,)).astype(np.float32) - self.ys = [a * self.x + b + np.random.normal(scale=0.1, size=(length,)) for _ in self.label_names] - self.ys = [y.astype(np.float32) for y in self.ys] - - def __len__(self): - return self.length - - def __getitem__(self, i): - result = {name: y[i] for name, y in zip(self.label_names, self.ys)} - result["input_x"] = self.x[i] - return result - - -@pytest.mark.huggingface -@pytest.mark.integrations -class TestHuggingFace(BaseE2ETest): - @property - def _trainer_default_attributes(self): - config = RegressionModelConfig() - model = RegressionPreTrainedModel(config) - train_dataset = RegressionDataset(length=32) - validation_dataset = RegressionDataset(length=16) - - train_args = transformers.TrainingArguments("model", report_to=[], num_train_epochs=500, learning_rate=0.5) - - return { - "model": model, - "args": train_args, - "train_dataset": train_dataset, - "eval_dataset": validation_dataset, - } - - def _test_with_run_initialization(self, environment, *, pre, post): - with init_run(project=environment.project, api_token=environment.user_token) as run: - run_id = run["sys/id"].fetch() - pre(run) - - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - run = init_run( - with_id=run_id, - project=environment.project, - api_token=environment.user_token, - mode="read-only", - ) - post(run) - - def _test_monitoring_cpu_present(self, environment, table_entry: TableEntry) -> bool: - with init_run( - with_id=table_entry.get_attribute_value("sys/id"), - project=environment.project, - api_token=environment.user_token, - ) as neptune_run: - hash_key = list(neptune_run.get_structure()["monitoring"].keys())[0] - - return neptune_run.exists(f"monitoring/{hash_key}/cpu") - - def test_every_train_should_create_new_run(self, environment, project, common_tag): - trainer = transformers.Trainer( - **self._trainer_default_attributes, - callbacks=[ - transformers.integrations.NeptuneCallback( - api_token=environment.user_token, project=environment.project, tags=[common_tag] - ) - 
], - ) - - expected_times = 5 - for _ in range(expected_times): - trainer.train() - - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - assert len(project.fetch_runs_table(tag=common_tag).to_rows()) == expected_times - - def test_runtime_factor(self, environment): - with catch_time() as standard: - trainer = transformers.Trainer(**self._trainer_default_attributes) - trainer.train() - del trainer - - with catch_time() as with_neptune_callback: - trainer = transformers.Trainer( - **self._trainer_default_attributes, - callbacks=[ - transformers.integrations.NeptuneCallback( - api_token=environment.user_token, project=environment.project - ) - ], - ) - trainer.train() - del trainer - - assert with_neptune_callback() / standard() <= MAX_OVERWHELMING_FACTOR - - def test_run_access_methods(self, environment): - callback = transformers.integrations.NeptuneCallback( - api_token=environment.user_token, project=environment.project - ) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - - assert callback.run.get_url() == transformers.integrations.NeptuneCallback.get_run(trainer).get_url() - - def test_initialization_with_run_provided(self, environment): - run = init_run(project=environment.project, api_token=environment.user_token) - callback = transformers.integrations.NeptuneCallback(run=run) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - - assert run.get_url() == transformers.integrations.NeptuneCallback.get_run(trainer).get_url() - - def test_run_reinitialization_failure(self, environment): - run = init_run(project=environment.project, api_token=environment.user_token) - - with modified_environ("NEPTUNE_API_TOKEN", "NEPTUNE_PROJECT"): - callback = transformers.integrations.NeptuneCallback(run=run) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - - trainer.train() - - with pytest.raises(transformers.integrations.NeptuneMissingConfiguration): - trainer.train() - - def test_run_access_without_callback_configured(self): - trainer = transformers.Trainer(**self._trainer_default_attributes) - - with pytest.raises(Exception): - transformers.integrations.NeptuneCallback.get_run(trainer) - - def test_log_parameters_with_base_namespace(self, environment): - base_namespace = "custom/base/path" - - def run_test(run): - callback = transformers.integrations.NeptuneCallback(run=run, base_namespace=base_namespace) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - trainer.train() - - def assert_metadata_structure(run): - assert run.exists(f"{base_namespace}/trainer_parameters") - assert run.exists(f"{base_namespace}/trainer_parameters/num_train_epochs") - assert run[f"{base_namespace}/trainer_parameters/num_train_epochs"].fetch() == 500 - - assert run.exists(f"{base_namespace}/model_parameters") - assert run.exists(f"{base_namespace}/model_parameters/a") - assert run.exists(f"{base_namespace}/model_parameters/b") - assert run[f"{base_namespace}/model_parameters/a"].fetch() == 2 - assert run[f"{base_namespace}/model_parameters/b"].fetch() == 3 - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - def test_log_parameters_disabled(self, environment): - def run_test(run): - callback = transformers.integrations.NeptuneCallback(run=run, log_parameters=False) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - trainer.train() - - def assert_metadata_structure(run): - 
assert not run.exists("finetuning/trainer_parameters") - assert not run.exists("finetuning/model_parameters") - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - def test_log_with_custom_base_namespace(self, environment): - base_namespace = "just/a/sample/path" - - def run_test(run): - callback = transformers.integrations.NeptuneCallback( - run=run, - base_namespace=base_namespace, - project=environment.project, - api_token=environment.user_token, - ) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - trainer.log({"metric1": 123, "another/metric": 0.2}) - trainer.train() - trainer.log({"after_training_metric": 2501}) - - def assert_metadata_structure(run): - assert run.exists(f"{base_namespace}/train") - assert run.exists(f"{base_namespace}/train/metric1") - assert run.exists(f"{base_namespace}/train/another/metric") - assert run.exists(f"{base_namespace}/train/after_training_metric") - - assert run[f"{base_namespace}/train/metric1"].fetch_last() == 123 - assert run[f"{base_namespace}/train/another/metric"].fetch_last() == 0.2 - assert run[f"{base_namespace}/train/after_training_metric"].fetch_last() == 2501 - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - def test_integration_version_is_logged(self, environment): - def run_test(run): - callback = transformers.integrations.NeptuneCallback(run=run) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - trainer.train() - - def assert_metadata_structure(run): - assert run.exists("source_code/integrations/transformers") - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - def test_non_monitoring_runs_creation(self, environment, project, common_tag): - # given - callback = transformers.integrations.NeptuneCallback( - project=environment.project, api_token=environment.user_token, tags=common_tag - ) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - - # when - trainer.log({"metric1": 123}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - with pytest.raises(ValueError): - runs[0].get_attribute_value("monitoring/cpu") - assert runs[0].get_attribute_value("finetuning/train/metric1") == 123 - - # when - trainer.train() - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - assert self._test_monitoring_cpu_present(environment, runs[0]) - - # when - trainer.log({"metric2": 234}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - # assert runs[0].get_attribute_value("monitoring/cpu") is not None - assert runs[0].get_attribute_value("finetuning/train/metric2") == 234 - - # when - trainer.train() - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = sorted( - project.fetch_runs_table(tag=common_tag).to_rows(), - key=lambda run: run.get_attribute_value("sys/id"), - ) - assert len(runs) == 2 - assert self._test_monitoring_cpu_present(environment, runs[1]) - - # when - trainer.log({"metric3": 345}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = sorted( - project.fetch_runs_table(tag=common_tag).to_rows(), - key=lambda run: run.get_attribute_value("sys/id"), - ) - assert len(runs) == 2 - assert 
runs[1].get_attribute_value("finetuning/train/metric3") == 345 - - def test_non_monitoring_runs_creation_with_initial_run(self, environment, project, common_tag): - # given - initial_run = init_run(project=environment.project, api_token=environment.user_token, tags=common_tag) - callback = transformers.integrations.NeptuneCallback( - project=environment.project, - api_token=environment.user_token, - tags=common_tag, - run=initial_run, - ) - trainer = transformers.Trainer(**self._trainer_default_attributes, callbacks=[callback]) - - # when - trainer.log({"metric1": 123}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - assert self._test_monitoring_cpu_present(environment, runs[0]) - assert runs[0].get_attribute_value("finetuning/train/metric1") == 123 - - # when - trainer.train() - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - assert self._test_monitoring_cpu_present(environment, runs[0]) - - # when - trainer.log({"metric2": 234}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = project.fetch_runs_table(tag=common_tag).to_rows() - assert len(runs) == 1 - assert self._test_monitoring_cpu_present(environment, runs[0]) - assert runs[0].get_attribute_value("finetuning/train/metric2") == 234 - - # when - trainer.train() - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = sorted( - project.fetch_runs_table(tag=common_tag).to_rows(), - key=lambda run: run.get_attribute_value("sys/id"), - ) - assert len(runs) == 2 - assert self._test_monitoring_cpu_present(environment, runs[0]) - - # when - trainer.log({"metric3": 345}) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = sorted( - project.fetch_runs_table(tag=common_tag).to_rows(), - key=lambda run: run.get_attribute_value("sys/id"), - ) - assert len(runs) == 2 - assert runs[1].get_attribute_value("finetuning/train/metric3") == 345 - - def test_hyperparameter_optimization(self, environment, project, common_tag): - # given - n_trials = 5 - - # and - def model_init(): - config = RegressionModelConfig() - return RegressionPreTrainedModel(config) - - # and - callback = transformers.integrations.NeptuneCallback( - project=environment.project, api_token=environment.user_token, tags=common_tag - ) - trainer_config = self._trainer_default_attributes - del trainer_config["model"] - trainer = transformers.Trainer(**trainer_config, model_init=model_init, callbacks=[callback]) - - # when - trainer.hyperparameter_search( - backend="optuna", n_trials=n_trials, hp_name=lambda trial: f"trial_{trial.number}" - ) - time.sleep(SECONDS_TO_WAIT_FOR_UPDATE) - - # then - runs = sorted( - project.fetch_runs_table(tag=common_tag).to_rows(), - key=lambda run: run.get_attribute_value("sys/id"), - ) - assert len(runs) == n_trials - for run_id, run in enumerate(runs): - assert run.get_attribute_value("finetuning/trial") == f"trial_{run_id}" - assert self._test_monitoring_cpu_present(environment, run) - - def test_usages(self): - # given - trainer_args = self._trainer_default_attributes - trainer_args["args"] = transformers.TrainingArguments("model", report_to=["all"]) - - # when - trainer = transformers.Trainer(**trainer_args) - - # then - assert "NeptuneCallback" in [type(callback).__name__ for callback in trainer.callback_handler.callbacks] - - # given - trainer_args = self._trainer_default_attributes - trainer_args["args"] = transformers.TrainingArguments("model", 
report_to=["neptune"]) - - # when - trainer = transformers.Trainer(**trainer_args) - - # then - assert "NeptuneCallback" in [type(callback).__name__ for callback in trainer.callback_handler.callbacks] - - # when - trainer = transformers.Trainer( - **self._trainer_default_attributes, callbacks=[transformers.integrations.NeptuneCallback] - ) - - # then - assert "NeptuneCallback" in [type(callback).__name__ for callback in trainer.callback_handler.callbacks] - - # when - trainer = transformers.Trainer( - **self._trainer_default_attributes, callbacks=[transformers.integrations.NeptuneCallback()] - ) - - # then - assert "NeptuneCallback" in [type(callback).__name__ for callback in trainer.callback_handler.callbacks] - - def _test_checkpoints_creation( - self, - environment, - log_checkpoints, - expected_checkpoints=None, - expected_checkpoints_number=None, - additional_training_args=None, - checkpoints_key="", - ): - if expected_checkpoints is not None: - expected_checkpoints.update({"/", "model"}) - - if additional_training_args is None: - additional_training_args = {} - - def run_test(run): - callback = transformers.integrations.NeptuneCallback(run=run, log_checkpoints=log_checkpoints) - training_args = self._trainer_default_attributes - training_args["args"] = transformers.TrainingArguments( - "model", - report_to=[], - num_train_epochs=500, - save_steps=1000, - save_strategy="steps", - **additional_training_args, - ) - trainer = transformers.Trainer(**training_args, callbacks=[callback]) - trainer.train() - - def assert_metadata_structure(run): - assert run.exists("finetuning/checkpoints") - assert run["finetuning/train/epoch"].fetch() == 500 - with tmp_context(): - run[f"finetuning/checkpoints/{checkpoints_key}"].download("checkpoints.zip") - - with ZipFile("checkpoints.zip") as handler: - subdirectories = set([os.path.dirname(x) for x in handler.namelist()]) - - if expected_checkpoints_number is not None: - assert len(subdirectories) == expected_checkpoints_number - - if expected_checkpoints is not None: - assert subdirectories == expected_checkpoints - handler.extractall(".") - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - def _test_restore_from_checkpoint(self, environment): - def run_test(run): - callback = transformers.integrations.NeptuneCallback(run=run) - training_args = self._trainer_default_attributes - training_args["args"] = transformers.TrainingArguments("model", report_to=[], num_train_epochs=1000) - trainer = transformers.Trainer(**training_args, callbacks=[callback]) - checkpoint_id = max(os.listdir("model")) - trainer.train(resume_from_checkpoint=f"model/{checkpoint_id}") - - def assert_metadata_structure(run): - assert run["finetuning/train/epoch"].fetch() == 1000 - - self._test_with_run_initialization(environment, pre=run_test, post=assert_metadata_structure) - - @pytest.mark.parametrize("checkpoint_settings", [{}, {"save_total_limit": 1}, {"overwrite_output_dir": True}]) - def test_model_checkpoints_same(self, environment, checkpoint_settings): - with tmp_context(): - self._test_checkpoints_creation( - environment=environment, - log_checkpoints="same", - expected_checkpoints={"model/checkpoint-1000", "model/checkpoint-2000"}, - additional_training_args=checkpoint_settings, - ) - self._test_restore_from_checkpoint(environment=environment) - - @pytest.mark.parametrize("checkpoint_settings", [{}, {"save_total_limit": 1}, {"overwrite_output_dir": True}]) - def test_model_checkpoints_last(self, environment, 
checkpoint_settings): - with tmp_context(): - self._test_checkpoints_creation( - environment=environment, - log_checkpoints="last", - expected_checkpoints={"model/checkpoint-2000"}, - checkpoints_key="last", - additional_training_args=checkpoint_settings, - ) - self._test_restore_from_checkpoint(environment=environment) - - @pytest.mark.parametrize("checkpoint_settings", [{}, {"save_total_limit": 1}, {"overwrite_output_dir": True}]) - def test_model_checkpoints_best(self, environment, checkpoint_settings): - with tmp_context(): - self._test_checkpoints_creation( - environment=environment, - log_checkpoints="best", - additional_training_args={ - "load_best_model_at_end": True, - "evaluation_strategy": "steps", - "eval_steps": 500, - **checkpoint_settings, - }, - expected_checkpoints_number=3, - checkpoints_key="best", - ) - self._test_restore_from_checkpoint(environment=environment) - - def test_model_checkpoints_best_invalid_load_best_model_at_end(self, environment): - with init_run(project=environment.project, api_token=environment.user_token) as run: - callback = transformers.integrations.NeptuneCallback(run=run, log_checkpoints="best") - training_args = self._trainer_default_attributes - training_args["args"] = transformers.TrainingArguments( - "model", - report_to=[], - num_train_epochs=500, - learning_rate=0.5, - save_steps=500, - save_strategy="steps", - load_best_model_at_end=False, - evaluation_strategy="steps", - eval_steps=500, - ) - with pytest.raises(ValueError): - transformers.Trainer(**training_args, callbacks=[callback]) diff --git a/tests/e2e/integrations/test_lightning.py b/tests/e2e/integrations/test_lightning.py deleted file mode 100644 index 8918bea89..000000000 --- a/tests/e2e/integrations/test_lightning.py +++ /dev/null @@ -1,168 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
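The checkpoint tests above assert on the set of subdirectories inside the downloaded archive. That inspection in isolation, assuming a checkpoints.zip in the current directory:

import os
from zipfile import ZipFile

with ZipFile("checkpoints.zip") as handler:
    # Every archived file contributes its parent directory, so the set
    # ends up holding one entry per checkpoint directory in the archive.
    subdirectories = {os.path.dirname(name) for name in handler.namelist()}

print(sorted(subdirectories))  # e.g. ["model", "model/checkpoint-1000", ...]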
-# -import os -import re - -import pytest - -import neptune -from tests.e2e.base import BaseE2ETest - -torch = pytest.importorskip("torch") -torch.utils.data = pytest.importorskip("torch.utils.data") -pytorch_lightning = pytest.importorskip("pytorch_lightning") -pytorch_lightning.callbacks = pytest.importorskip("pytorch_lightning.callbacks") -pytorch_lightning.loggers.neptune = pytest.importorskip("pytorch_lightning.loggers.neptune") - -LIGHTNING_ECOSYSTEM_ENV_PROJECT = "NEPTUNE_LIGHTNING_ECOSYSTEM_CI_PROJECT" - -skip_if_on_regular_env = pytest.mark.skipif( - LIGHTNING_ECOSYSTEM_ENV_PROJECT not in os.environ, reason="Tests weren't invoked in Lightning Ecosystem CI" -) -skip_if_on_lightning_ecosystem = pytest.mark.skipif( - LIGHTNING_ECOSYSTEM_ENV_PROJECT in os.environ, reason="Tests invoked in Lightning Ecosystem CI" -) - - -class RandomDataset(torch.utils.data.Dataset): - def __init__(self, size, length): - self.len = length - self.data = torch.randn(length, size) - - def __getitem__(self, index): - return self.data[index] - - def __len__(self): - return self.len - - -class BoringModel(pytorch_lightning.LightningModule): - def __init__(self): - super().__init__() - self.layer = torch.nn.Linear(32, 2) - - def forward(self, *args, **kwargs): - return self.layer(*args, **kwargs) - - def training_step(self, *args, **kwargs): - batch, *_ = args - loss = self(batch).sum() - self.log("train/loss", loss) - return {"loss": loss} - - def validation_step(self, *args, **kwargs): - batch, *_ = args - loss = self(batch, **kwargs).sum() - self.log("valid/loss", loss) - - def test_step(self, *args, **kwargs): - batch, *_ = args - loss = self(batch, **kwargs).sum() - self.log("test/loss", loss) - - def configure_optimizers(self): - return torch.optim.SGD(self.layer.parameters(), lr=0.1) - - -def prepare(project): - # given - run = neptune.init_run(name="Pytorch-Lightning integration", project=project) - # and - model_checkpoint = pytorch_lightning.callbacks.ModelCheckpoint( - dirpath="my_model/checkpoints/", - filename="{epoch:02d}-{valid/loss:.2f}", - save_weights_only=True, - save_top_k=2, - save_last=True, - monitor="valid/loss", - every_n_epochs=1, - ) - neptune_logger = pytorch_lightning.loggers.neptune.NeptuneLogger(run=run, prefix="custom_prefix") - # and (Subject) - model = BoringModel() - trainer = pytorch_lightning.Trainer( - limit_train_batches=1, - limit_val_batches=1, - log_every_n_steps=1, - max_epochs=3, - logger=neptune_logger, - callbacks=[model_checkpoint], - ) - train_data = torch.utils.data.DataLoader(RandomDataset(32, 64), batch_size=2) - val_data = torch.utils.data.DataLoader(RandomDataset(32, 64), batch_size=2) - test_data = torch.utils.data.DataLoader(RandomDataset(32, 64), batch_size=2) - - # then - trainer.fit(model, train_dataloaders=train_data, val_dataloaders=val_data) - trainer.test(model, dataloaders=test_data) - run.sync() - - return run - - -@pytest.fixture(scope="session") -def model_in_regular_env(environment): - yield prepare(project=environment.project) - - -@pytest.fixture(scope="session") -def model_in_lightning_ci_project(): - yield prepare(project=os.getenv("NEPTUNE_LIGHTNING_ECOSYSTEM_CI_PROJECT")) - - -@pytest.mark.integrations -@pytest.mark.lightning -class TestPytorchLightning(BaseE2ETest): - def _test_logging_values(self, pytorch_run): - # correct integration version is logged - if pytorch_run.exists("source_code/integrations/lightning"): - logged_version = pytorch_run["source_code/integrations/lightning"].fetch() - else: - logged_version = 
pytorch_run["source_code/integrations/pytorch-lightning"].fetch() - assert logged_version == pytorch_lightning.__version__ - - assert pytorch_run.exists("custom_prefix/valid/loss") - assert len(pytorch_run["custom_prefix/valid/loss"].fetch_values()) == 3 - - @skip_if_on_lightning_ecosystem - def test_logging_values(self, model_in_regular_env): - self._test_logging_values(model_in_regular_env) - - @skip_if_on_regular_env - def test_logging_values_in_lightning_ci(self, model_in_lightning_ci_project): - self._test_logging_values(model_in_lightning_ci_project) - - def _test_saving_models(self, pytorch_run): - best_model_path = pytorch_run["custom_prefix/model/best_model_path"].fetch() - assert re.match( - r".*my_model/checkpoints/epoch=.*-valid/loss=.*\.ckpt$", - best_model_path, - ) - assert pytorch_run["custom_prefix/model/best_model_score"].fetch() is not None - - # make sure that exactly `save_top_k` checkpoints - # NOTE: when `max_epochs` is close to `save_top_k` there may be less than `save_top_k` saved models - checkpoints = pytorch_run["custom_prefix/model/checkpoints"].fetch() - assert all((checkpoint.startswith("epoch=") for checkpoint in checkpoints)) - assert len(checkpoints) == 2 - - @skip_if_on_lightning_ecosystem - def test_saving_models(self, model_in_regular_env): - self._test_saving_models(model_in_regular_env) - - @skip_if_on_regular_env - def test_saving_models_in_lightning_ci(self, model_in_lightning_ci_project): - self._test_saving_models(model_in_lightning_ci_project) diff --git a/tests/e2e/integrations/test_mosaicml.py b/tests/e2e/integrations/test_mosaicml.py deleted file mode 100644 index 35369b2f9..000000000 --- a/tests/e2e/integrations/test_mosaicml.py +++ /dev/null @@ -1,104 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pytest - -torch = pytest.importorskip("torch") -torch.utils.data = pytest.importorskip("torch.utils.data") -nn = pytest.importorskip("torch.nn") -F = pytest.importorskip("torch.nn.functional") - -torchvision = pytest.importorskip("torchvision") - -composer = pytest.importorskip("composer") -composer.loggers = pytest.importorskip("composer.loggers") -composer.models = pytest.importorskip("composer.models") -composer.algorithms = pytest.importorskip("composer.algorithms") -composer.callbacks = pytest.importorskip("composer.callbacks") - - -@pytest.fixture(scope="module") -def model() -> composer.models.ComposerClassifier: - # https://github.com/mosaicml/composer/blob/dev/examples/checkpoint_with_wandb.py - class Model(nn.Module): - """Toy convolutional neural network architecture in pytorch for MNIST.""" - - def __init__(self, num_classes: int = 10): - super().__init__() - - self.num_classes = num_classes - - self.conv1 = nn.Conv2d(1, 16, (3, 3), padding=0) - self.conv2 = nn.Conv2d(16, 32, (3, 3), padding=0) - self.bn = nn.BatchNorm2d(32) - self.fc1 = nn.Linear(32 * 16, 32) - self.fc2 = nn.Linear(32, num_classes) - - def forward(self, x): - out = self.conv1(x) - out = F.relu(out) - out = self.conv2(out) - out = self.bn(out) - out = F.relu(out) - out = F.adaptive_avg_pool2d(out, (4, 4)) - out = torch.flatten(out, 1, -1) - out = self.fc1(out) - out = F.relu(out) - return self.fc2(out) - - return composer.models.ComposerClassifier(module=Model(num_classes=10)) - - -@pytest.mark.integrations -@pytest.mark.mosaicml -def test_e2e(environment, model): - transform = torchvision.transforms.Compose([torchvision.transforms.ToTensor()]) - - train_dataset = torchvision.datasets.MNIST("data", download=True, train=True, transform=transform) - eval_dataset = torchvision.datasets.MNIST("data", download=True, train=False, transform=transform) - - train_dataset = torch.utils.data.Subset(train_dataset, indices=range(len(train_dataset) // 50)) - eval_dataset = torch.utils.data.Subset(eval_dataset, indices=range(len(eval_dataset) // 50)) - train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=128) - eval_dataloader = torch.utils.data.DataLoader(eval_dataset, batch_size=128) - logger = composer.loggers.NeptuneLogger(project=environment.project, base_namespace="composer-training") - - trainer = composer.Trainer( - model=model, - train_dataloader=train_dataloader, - eval_dataloader=eval_dataloader, - max_duration="1ep", - algorithms=[ - composer.algorithms.ChannelsLast(), - composer.algorithms.CutMix(alpha=1.0), - composer.algorithms.LabelSmoothing(smoothing=0.1), - ], - loggers=logger, - callbacks=composer.callbacks.ImageVisualizer(), - ) - trainer.fit() - - logger.neptune_run.sync() - - assert logger.neptune_run.exists("composer-training") - - assert logger.neptune_run.exists("composer-training/Images/Train") - assert logger.neptune_run.exists("composer-training/Images/Eval") - - assert logger.neptune_run.exists("composer-training/metrics/loss/train/total") - - assert logger.neptune_run["composer-training/hyperparameters/num_nodes"].fetch() == 1 - - assert logger.neptune_run.exists("composer-training/traces/algorithm_traces/ChannelsLast") diff --git a/tests/e2e/integrations/test_zenml.py b/tests/e2e/integrations/test_zenml.py deleted file mode 100644 index bc94e0334..000000000 --- a/tests/e2e/integrations/test_zenml.py +++ /dev/null @@ -1,128 +0,0 @@ -import hashlib - -import pytest -from sklearn.datasets import load_digits -from sklearn.model_selection import train_test_split -from 
sklearn.svm import SVC - -import neptune -from tests.e2e.base import BaseE2ETest - -zenml = pytest.importorskip("zenml") -pipelines = pytest.importorskip("zenml.pipelines") -flavors = pytest.importorskip("zenml.integrations.neptune.flavors") - -NEPTUNE_EXPERIMENT_TRACKER_NAME = "neptune_tracker" -NEPTUNE_STACK_NAME = "neptune_stack" - - -@pytest.fixture(scope="session") -def registered_stack(zenml_client, experiment_tracker_comp, stack_with_neptune): - try: - zenml_client.initialize() - except zenml.exceptions.InitializationException: - pass - - zenml_client.activate_stack(NEPTUNE_STACK_NAME) - - -@pytest.fixture(scope="session") -def zenml_client(): - from zenml.client import Client - - return Client() - - -@pytest.fixture(scope="session") -def experiment_tracker_comp(zenml_client, environment): - try: - return zenml_client.create_stack_component( - name=NEPTUNE_EXPERIMENT_TRACKER_NAME, - component_type=zenml.enums.StackComponentType.EXPERIMENT_TRACKER, - flavor="neptune", - configuration={"api_token": environment.user_token, "project": environment.project}, - ) - except zenml.exceptions.StackComponentExistsError: - return zenml_client.get_stack_component( - component_type=zenml.enums.StackComponentType.EXPERIMENT_TRACKER, - name_id_or_prefix=NEPTUNE_EXPERIMENT_TRACKER_NAME, - ) - - -@pytest.fixture(scope="session") -def stack_with_neptune(zenml_client, experiment_tracker_comp): - a_id = zenml_client.active_stack.artifact_store.id - o_id = zenml_client.active_stack.orchestrator.id - e_id = experiment_tracker_comp.id - - try: - return zenml_client.create_stack( - name=NEPTUNE_STACK_NAME, - components={ - zenml.enums.StackComponentType.ARTIFACT_STORE: a_id, - zenml.enums.StackComponentType.ORCHESTRATOR: o_id, - zenml.enums.StackComponentType.EXPERIMENT_TRACKER: e_id, - }, - ) - except zenml.exceptions.StackExistsError: - return zenml_client.get_stack(name_id_or_prefix=NEPTUNE_STACK_NAME) - - -@zenml.steps.step( - experiment_tracker=NEPTUNE_EXPERIMENT_TRACKER_NAME, - settings={"experiment_tracker.neptune": flavors.NeptuneExperimentTrackerSettings(tags={"sklearn", "digits"})}, - enable_cache=False, -) -def example_step() -> None: - """A very minimalistic pipeline step. - Loads a sample dataset, trains a simple classifier and logs - a couple of metrics. 
- """ - neptune_run = zenml.integrations.neptune.experiment_trackers.run_state.get_neptune_run() - digits = load_digits() - data = digits.images.reshape((len(digits.images), -1)) - - x_train, x_test, y_train, y_test = train_test_split(data, digits.target, test_size=0.3) - gamma = 0.001 - neptune_run["params/gamma"] = gamma - model = SVC(gamma=gamma) - model.fit(x_train, y_train) - test_acc = model.score(x_test, y_test) - neptune_run["metrics/val_accuracy"] = test_acc - - -@pipelines.pipeline( - enable_cache=False, -) -def neptune_example_pipeline(ex_step): - """ - Link all the steps artifacts together - """ - ex_step() - - -@pytest.mark.integrations -@pytest.mark.zenml -class TestZenML(BaseE2ETest): - def _test_setup_creates_stack_with_neptune_experiment_tracker(self, zenml_client): - assert zenml_client.active_stack.experiment_tracker.name == NEPTUNE_EXPERIMENT_TRACKER_NAME - - def _test_pipeline_runs_without_errors(self): - run = neptune_example_pipeline(ex_step=example_step()) - run.run() - - self.zenml_run_name = run.get_runs()[-1].name - - def _test_fetch_neptune_run(self, environment): - custom_run_id = hashlib.md5(self.zenml_run_name.encode()).hexdigest() - neptune_run = neptune.init_run( - custom_run_id=custom_run_id, project=environment.project, api_token=environment.user_token - ) - assert neptune_run["params/gamma"].fetch() == 0.001 - assert neptune_run["sys/tags"].fetch() == {"sklearn", "digits"} - assert neptune_run["metrics/val_accuracy"].fetch() <= 1 - - def test_zenml(self, registered_stack, zenml_client, environment): - self._test_setup_creates_stack_with_neptune_experiment_tracker(zenml_client) - self._test_pipeline_runs_without_errors() - self._test_fetch_neptune_run(environment) diff --git a/tests/e2e/management/__init__.py b/tests/e2e/management/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/e2e/management/test_management.py b/tests/e2e/management/test_management.py deleted file mode 100644 index 1e6196f40..000000000 --- a/tests/e2e/management/test_management.py +++ /dev/null @@ -1,550 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import time -from functools import partial -from typing import ( - Callable, - Dict, - List, -) - -import backoff -import pytest - -from neptune import ( - Project, - init_model_version, -) -from neptune.internal.container_type import ContainerType -from neptune.management import ( - ProjectVisibility, - add_project_member, - add_project_service_account, - clear_trash, - create_project, - delete_project, - get_project_list, - get_project_member_list, - get_project_service_account_list, - get_workspace_member_list, - get_workspace_service_account_list, - get_workspace_status, - invite_to_workspace, - remove_project_member, - remove_project_service_account, - trash_objects, -) -from neptune.management.exceptions import ( - ProjectNotFound, - ProjectPrivacyRestrictedException, - UserAlreadyInvited, - UserNotExistsOrWithoutAccess, - WorkspaceOrUserNotFound, -) -from neptune.management.internal.utils import normalize_project_name -from neptune.table import Table -from tests.e2e.base import ( - BaseE2ETest, - fake, -) -from tests.e2e.utils import ( - Environment, - a_project_name, - initialize_container, -) - -WAIT_DURATION = 60 - - -@pytest.mark.management -class TestManagement(BaseE2ETest): - @staticmethod - def _assure_presence_and_role(*, username: str, expected_role: str, member_list: Dict[str, str]): - assert username in member_list - assert member_list.get(username) == expected_role - - def test_standard_scenario(self, environment: Environment): - project_name = a_project_name(project_slug=f"{fake.slug()}-mgmt") - project_identifier = normalize_project_name(name=project_name, workspace=environment.workspace) - - assert project_identifier not in get_project_list(api_token=environment.admin_token) - assert project_identifier not in get_project_list(api_token=environment.user_token) - - self._assure_presence_and_role( - username=environment.user, - expected_role="member", - member_list=get_workspace_member_list(workspace=environment.workspace, api_token=environment.admin_token), - ) - self._assure_presence_and_role( - username=environment.user, - expected_role="member", - member_list=get_workspace_member_list(workspace=environment.workspace, api_token=environment.user_token), - ) - self._assure_presence_and_role( - username=environment.service_account, - expected_role="member", - member_list=get_workspace_service_account_list( - workspace=environment.workspace, api_token=environment.user_token - ), - ) - - created_project_identifier = create_project( - name=project_name, - visibility="priv", - workspace=environment.workspace, - api_token=environment.admin_token, - ) - - assert created_project_identifier == project_identifier - assert created_project_identifier in get_project_list(api_token=environment.admin_token) - assert created_project_identifier not in get_project_list(api_token=environment.user_token) - - assert environment.user not in get_project_member_list( - project=created_project_identifier, api_token=environment.admin_token - ) - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ) - - add_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - role="contributor", - api_token=environment.admin_token, - ) - add_project_member( - project=created_project_identifier, - username=environment.user, - role="contributor", - api_token=environment.admin_token, - ) - - project_members = 
get_project_member_list(project=created_project_identifier, api_token=environment.admin_token) - assert environment.user in project_members - assert project_members.get(environment.user) == "contributor" - - project_members = get_project_member_list(project=created_project_identifier, api_token=environment.user_token) - assert environment.user in project_members - assert project_members.get(environment.user) == "contributor" - assert environment.service_account not in project_members - - assert created_project_identifier in get_project_list(api_token=environment.user_token) - - remove_project_member( - project=created_project_identifier, - username=environment.user, - api_token=environment.admin_token, - ) - remove_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - api_token=environment.admin_token, - ) - - assert created_project_identifier not in get_project_list(api_token=environment.user_token) - assert environment.user not in get_project_member_list( - project=created_project_identifier, api_token=environment.admin_token - ) - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ) - - delete_project(project=created_project_identifier, api_token=environment.admin_token) - - assert created_project_identifier not in get_project_list(api_token=environment.admin_token) - - def test_visibility_workspace(self, environment: "Environment"): - project_name = a_project_name(project_slug=f"{fake.slug()}-workspace") - project_identifier = normalize_project_name(name=project_name, workspace=environment.workspace) - - assert project_identifier not in get_project_list(api_token=environment.admin_token) - assert project_identifier not in get_project_list(api_token=environment.user_token) - - self._assure_presence_and_role( - username=environment.user, - expected_role="member", - member_list=get_workspace_member_list(workspace=environment.workspace, api_token=environment.admin_token), - ) - - created_project_identifier = create_project( - name=project_name, - visibility="workspace", - workspace=environment.workspace, - api_token=environment.admin_token, - ) - - assert created_project_identifier == project_identifier - assert created_project_identifier in get_project_list(api_token=environment.admin_token) - - self._assure_presence_and_role( - username=environment.user, - expected_role="owner", - member_list=get_project_member_list(project=created_project_identifier, api_token=environment.admin_token), - ) - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ) - - add_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - role="contributor", - api_token=environment.admin_token, - ) - - self._assure_presence_and_role( - username=environment.service_account, - expected_role="contributor", - member_list=get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ), - ) - - with pytest.raises(UserNotExistsOrWithoutAccess): - remove_project_member( - project=created_project_identifier, - username=environment.user, - api_token=environment.admin_token, - ) - - remove_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - api_token=environment.admin_token, - ) - - 
self._assure_presence_and_role( - username=environment.user, - expected_role="owner", - member_list=get_project_member_list(project=created_project_identifier, api_token=environment.admin_token), - ) - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ) - - delete_project(project=created_project_identifier, api_token=environment.admin_token) - - assert project_identifier not in get_project_list(api_token=environment.admin_token) - - def test_create_project(self, environment: "Environment"): - project_name = a_project_name(project_slug=f"{fake.slug()}-create") - project_identifier = normalize_project_name(name=project_name, workspace=environment.workspace) - - assert project_identifier not in get_project_list(api_token=environment.user_token) - self._assure_presence_and_role( - username=environment.user, - expected_role="member", - member_list=get_workspace_member_list(workspace=environment.workspace, api_token=environment.user_token), - ) - - created_project_identifier = create_project( - name=project_name, - workspace=environment.workspace, - api_token=environment.user_token, - ) - - assert created_project_identifier == project_identifier - assert created_project_identifier in get_project_list(api_token=environment.user_token) - - delete_project(project=created_project_identifier, api_token=environment.admin_token) - - assert project_identifier not in get_project_list(api_token=environment.user_token) - - def test_invalid_visibility(self, environment: "Environment"): - project_name = a_project_name(project_slug=f"{fake.slug()}-create") - project_identifier = normalize_project_name(name=project_name, workspace=environment.workspace) - - assert project_identifier not in get_project_list(api_token=environment.user_token) - self._assure_presence_and_role( - username=environment.user, - expected_role="member", - member_list=get_workspace_member_list(workspace=environment.workspace, api_token=environment.user_token), - ) - - with pytest.raises(ProjectPrivacyRestrictedException): - create_project( - name=project_name, - workspace=environment.workspace, - api_token=environment.user_token, - # TODO(bartosz.prusak): this is an invalid setting because workspaces have "public" setting banned as - # default. The test should check if the workspace used has this ban set (and if not - skip this test). 
- visibility=ProjectVisibility.PUBLIC, - ) - - assert project_identifier not in get_project_list(api_token=environment.user_token) - - def _test_add_sa_to_project_as_owner(self, created_project_identifier: str, environment: "Environment"): - self._assure_presence_and_role( - username=environment.user, - expected_role="owner", - member_list=get_project_member_list(project=created_project_identifier, api_token=environment.user_token), - ) - - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.user_token - ) - - add_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - role="contributor", - api_token=environment.user_token, - ) - self._assure_presence_and_role( - username=environment.service_account, - expected_role="contributor", - member_list=get_project_service_account_list( - project=created_project_identifier, api_token=environment.user_token - ), - ) - - remove_project_service_account( - project=created_project_identifier, - service_account_name=environment.service_account, - api_token=environment.user_token, - ) - assert environment.service_account not in get_project_service_account_list( - project=created_project_identifier, api_token=environment.admin_token - ) - - def _test_add_user_to_project_as_sa(self, created_project_identifier: str, environment: "Environment"): - self._assure_presence_and_role( - username=environment.service_account, - expected_role="owner", - member_list=get_project_service_account_list( - project=created_project_identifier, api_token=environment.user_token - ), - ) - - assert environment.user not in get_project_member_list( - project=created_project_identifier, api_token=environment.user_token - ) - - add_project_member( - project=created_project_identifier, - username=environment.user, - role="contributor", - api_token=environment.admin_token, - ) - self._assure_presence_and_role( - username=environment.user, - expected_role="contributor", - member_list=get_project_member_list(project=created_project_identifier, api_token=environment.user_token), - ) - - remove_project_member( - project=created_project_identifier, - username=environment.user, - api_token=environment.admin_token, - ) - assert environment.user not in get_project_member_list( - project=created_project_identifier, api_token=environment.user_token - ) - - def test_invite_as_non_admin(self, environment: "Environment"): - project_name = a_project_name(project_slug=f"{fake.slug()}-invitation") - project_identifier = normalize_project_name(name=project_name, workspace=environment.workspace) - - created_project_identifier = create_project( - name=project_name, - workspace=environment.workspace, - api_token=environment.user_token, - ) - - assert created_project_identifier == project_identifier - assert created_project_identifier in get_project_list(api_token=environment.user_token) - - # user who created a project (`user_token` owner) will be automatically project owner - sa_is_project_owner = ( - get_project_service_account_list(project=created_project_identifier, api_token=environment.user_token).get( - environment.service_account - ) - == "owner" - ) - user_is_project_owner = ( - get_project_member_list(project=created_project_identifier, api_token=environment.user_token).get( - environment.user - ) - == "owner" - ) - if sa_is_project_owner and not user_is_project_owner: - # SA has access to project, so tests are run as SA - 
self._test_add_user_to_project_as_sa(created_project_identifier, environment)
-        elif user_is_project_owner and not sa_is_project_owner:
-            # SA doesn't have access to project, so tests are run as user
-            self._test_add_sa_to_project_as_owner(created_project_identifier, environment)
-        else:
-            raise AssertionError("Expected only the SA or the user to be the owner of the newly created project.")
-
-        delete_project(project=created_project_identifier, api_token=environment.admin_token)
-
-        assert project_identifier not in get_project_list(api_token=environment.user_token)
-
-    def test_invite_to_workspace(self, environment: "Environment"):
-        with pytest.raises(UserAlreadyInvited):
-            invite_to_workspace(
-                username=environment.user, workspace=environment.workspace, api_token=environment.admin_token
-            )
-
-        with pytest.raises(UserAlreadyInvited):
-            invite_to_workspace(
-                username=environment.user,
-                workspace=environment.workspace,
-                api_token=environment.admin_token,
-                role="admin",
-            )
-
-        with pytest.raises(WorkspaceOrUserNotFound):
-            invite_to_workspace(
-                username="non-existent-user", workspace=environment.workspace, api_token=environment.admin_token
-            )
-
-        with pytest.raises(WorkspaceOrUserNotFound):
-            invite_to_workspace(
-                username=environment.user, workspace="non-existent-workspace", api_token=environment.admin_token
-            )
-
-    def test_workspace_status(self, environment: "Environment"):
-        status = get_workspace_status(workspace=environment.workspace, api_token=environment.admin_token)
-
-        assert "storageBytesAvailable" in status
-        assert "storageBytesLimit" in status
-        assert "storageBytesUsed" in status
-        assert status["storageBytesAvailable"] >= 0
-        assert status["storageBytesLimit"] >= 0
-        assert status["storageBytesUsed"] >= 0
-
-
-@pytest.mark.management
-class TestTrashObjects(BaseE2ETest):
-    """
-    Test trash_objects
-    """
-
-    def test_trash_objects_wrong_project(self):
-        with pytest.raises(ProjectNotFound):
-            trash_objects("org/non-existent-project", ["RUN-1", "RUN-2", "RUN-3"])
-
-    def test_trash_runs(self, project, environment):
-        run1_id = initialize_container(ContainerType.RUN, project=environment.project)["sys/id"].fetch()
-        run2_id = initialize_container(ContainerType.RUN, project=environment.project)["sys/id"].fetch()
-        # wait for elastic index to refresh
-        self.wait_for_containers([run1_id, run2_id], project.fetch_runs_table)
-
-        # WHEN trash the first run
-        trash_objects(environment.project, [run1_id])
-
-        # THEN trashed runs are not fetched
-        self.wait_for_containers([run2_id], project.fetch_runs_table)
-
-        # WHEN trash the other run
-        trash_objects(environment.project, [run2_id])
-
-        # THEN no runs are fetched
-        self.wait_for_containers([], project.fetch_runs_table)
-
-    @pytest.mark.skip(reason="Model is not supported")
-    def test_trash_runs_and_models(self, project, environment):
-        # WITH runs and models
-        run1_id = initialize_container(ContainerType.RUN, project=environment.project)["sys/id"].fetch()
-        run2_id = initialize_container(ContainerType.RUN, project=environment.project)["sys/id"].fetch()
-        model1_id = initialize_container(ContainerType.MODEL, project=environment.project)["sys/id"].fetch()
-        model2_id = initialize_container(ContainerType.MODEL, project=environment.project)["sys/id"].fetch()
-        # wait for elastic index to refresh
-        self.wait_for_containers([run1_id, run2_id], project.fetch_runs_table)
-        self.wait_for_containers([model1_id, model2_id], project.fetch_models_table)
-
-        # WHEN trash one run and one model
-        trash_objects(environment.project, [run1_id,
model1_id]) - - # THEN trashed runs are not fetched - self.wait_for_containers([run2_id], project.fetch_runs_table) - # AND trashed models are not fetched - self.wait_for_containers([model2_id], project.fetch_models_table) - - @pytest.mark.skip(reason="Model is not supported") - def test_trash_model_version(self, environment): - # WITH model - model = initialize_container(ContainerType.MODEL, project=environment.project) - model_id = model["sys/id"].fetch() - # AND model's model versions - model_version1_id = init_model_version(model=model_id, project=environment.project)["sys/id"].fetch() - model_version2_id = init_model_version(model=model_id, project=environment.project)["sys/id"].fetch() - self.wait_for_containers([model_version1_id, model_version2_id], model.fetch_model_versions_table) - - # WHEN model version is trashed - trash_objects(environment.project, model_version1_id) - - # THEN expect this version to not be fetched anymore - self.wait_for_containers([model_version2_id], model.fetch_model_versions_table) - - # WHEN whole model is trashed - trash_objects(environment.project, model_id) - - # THEN expect none of its versions to be fetched anymore - self.wait_for_containers([], model.fetch_model_versions_table) - - @backoff.on_exception(partial(backoff.expo, base=4), Exception, max_time=WAIT_DURATION) - def wait_for_containers(self, ids: List[str], container_provider: Callable[[], Table]): - fetched_entries = container_provider().to_pandas() - actual_ids = fetched_entries["sys/id"].tolist() if len(fetched_entries) > 0 else [] - assert sorted(actual_ids) == sorted(ids) - - -@pytest.mark.management -class TestDeleteFromTrash: - - @pytest.mark.parametrize( - ("n_runs", "n_models"), - [ - pytest.param( - 2, - 0, - marks=pytest.mark.skip(reason="Project is not supported"), - ), - pytest.param( - 2, - 1, - marks=pytest.mark.skip(reason="Project is not supported"), - ), - ], - ) - def test_delete_from_trash(self, environment, n_runs: int, n_models: int): - # given - runs = [initialize_container(ContainerType.RUN, project=environment.project) for _ in range(n_runs)] - models = [initialize_container(ContainerType.MODEL, project=environment.project) for _ in range(n_models)] - - run_ids = [run["sys/id"].fetch() for run in runs] - model_ids = [model["sys/id"].fetch() for model in models] - time.sleep(WAIT_DURATION) - - with initialize_container(ContainerType.PROJECT, project=environment.project) as project: - trash_objects(environment.project, run_ids + model_ids) - time.sleep(WAIT_DURATION) - - # when - clear_trash(environment.project) - time.sleep(WAIT_DURATION) - - # then - self.wait_for_containers_in_trash(0, 0, project) - - @backoff.on_exception(backoff.expo, Exception, max_time=WAIT_DURATION) - def wait_for_containers_in_trash(self, expected_run_count: int, expected_model_count: int, project: Project): - trashed_runs = project.fetch_runs_table(trashed=True).to_rows() - trashed_models = project.fetch_models_table(trashed=True).to_rows() - assert len(trashed_models) == expected_model_count - assert len(trashed_runs) == expected_run_count diff --git a/tests/e2e/plot_utils.py b/tests/e2e/plot_utils.py deleted file mode 100644 index 8fa60c5a3..000000000 --- a/tests/e2e/plot_utils.py +++ /dev/null @@ -1,133 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
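The wait_for_containers helpers above turn eventually consistent reads into polling assertions: backoff re-invokes the decorated function whenever it raises (an AssertionError included) until max_time elapses, then lets the last exception propagate. Minimal standalone form, with a hypothetical fetch_ids callable:

import backoff


@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def wait_until_indexed(fetch_ids, expected):
    # Retried with exponential backoff until the assertion holds
    # or the 60-second budget is exhausted.
    assert sorted(fetch_ids()) == sorted(expected)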
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import altair as alt -import matplotlib.pyplot as plt -import numpy as np -import plotly.express as px -import seaborn as sns -from bokeh import ( - models, - palettes, - plotting, - sampledata, -) -from PIL import Image -from vega_datasets import data - - -def generate_pil_image(): - data = np.random.randint(low=0, high=256, size=32 * 32 * 3, dtype=np.uint8) - data = data.reshape(32, 32, 3) - pil_image = Image.fromarray(data, "RGB") - return pil_image - - -def generate_matplotlib_figure(): - rect = 0.1, 0.1, 0.8, 0.8 - fig = plt.figure() - fig.add_axes(rect, label="label1") - fig.add_axes(rect, label="label2") - fig.add_axes(rect, frameon=False, facecolor="g") - fig.add_axes(rect, polar=True) - return fig - - -def generate_altair_chart(): - source = data.cars() - - brush = alt.selection(type="interval") - - points = ( - alt.Chart(source) - .mark_point() - .encode( - x="Horsepower:Q", y="Miles_per_Gallon:Q", color=alt.condition(brush, "Origin:N", alt.value("lightgray")) - ) - .add_selection(brush) - ) - - # TODO: return chart once problem with altair and JSONSchema is solved - # https://github.com/altair-viz/altair/issues/2705 - # bars = ( - # alt.Chart(source).mark_bar().encode(y="Origin:N", color="Origin:N", x="count(Origin):Q").transform_filter(brush) - # ) - # chart = points & bars - # return chart - return points - - -def generate_brokeh_figure(): - sampledata.download() - - from bokeh.sampledata.unemployment import data as unemployment - from bokeh.sampledata.us_counties import data as counties - - palette2 = tuple(reversed(palettes.Viridis6)) - - cnts = {code: county for code, county in counties.items() if county["state"] == "tx"} - - county_xs = [county["lons"] for county in cnts.values()] - county_ys = [county["lats"] for county in cnts.values()] - - county_names = [county["name"] for county in cnts.values()] - county_rates = [unemployment[county_id] for county_id in cnts] - color_mapper = models.LogColorMapper(palette=palette2) - - chart_data = dict( - x=county_xs, - y=county_ys, - name=county_names, - rate=county_rates, - ) - - TOOLS = "pan,wheel_zoom,reset,hover,save" - - bokeh_figure = plotting.figure( - title="Texas Unemployment, 2009", - tools=TOOLS, - x_axis_location=None, - y_axis_location=None, - tooltips=[("Name", "@name"), ("Unemployment rate", "@rate%"), ("(Long, Lat)", "($x, $y)")], - ) - bokeh_figure.grid.grid_line_color = None - bokeh_figure.hover.point_policy = "follow_mouse" - - bokeh_figure.patches( - "x", - "y", - source=chart_data, - fill_color={"field": "rate", "transform": color_mapper}, - fill_alpha=0.7, - line_color="white", - line_width=0.5, - ) - - return bokeh_figure - - -def generate_plotly_figure(): - df = px.data.tips() - plotly_fig = px.histogram(df, x="total_bill", y="tip", color="sex", marginal="rug", hover_data=df.columns) - - return plotly_fig - - -def generate_seaborn_figure(): - sample_size = 30 - x = np.random.rand(sample_size) * 2 * np.pi - data = {"x": x, "y": np.sin(x), "c": np.random.randint(0, 2, sample_size), "arch": x > np.pi} - seaborn_fig = sns.relplot(data=data, x="x", y="y", hue="c", col="arch") - return seaborn_fig diff 
diff --git a/tests/e2e/pytest.ini b/tests/e2e/pytest.ini
deleted file mode 100644
index ca56b3b31..000000000
--- a/tests/e2e/pytest.ini
+++ /dev/null
@@ -1,9 +0,0 @@
-[pytest]
-markers =
-    s3: a test is using AWS S3
-    management: a test is using Management API
-    integrations: a test is testing an integration
-    lightning: a test is testing pytorch-lightning integration (can also run with Lightning EcoSystem)
-    huggingface: a test is testing a HuggingFace Transformers integration
-    zenml: a test is running zenml tests
-    mosaicml: a test is running mosaicml tests
diff --git a/tests/e2e/standard/__init__.py b/tests/e2e/standard/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/e2e/standard/test_artifacts.py b/tests/e2e/standard/test_artifacts.py
deleted file mode 100644
index ac172673c..000000000
--- a/tests/e2e/standard/test_artifacts.py
+++ /dev/null
@@ -1,233 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import tempfile
-from pathlib import (
-    Path,
-    PurePosixPath,
-)
-
-import pytest
-
-from neptune.exceptions import NeptuneUnsupportedFunctionalityException
-from neptune.objects import NeptuneObject
-from tests.e2e.base import (
-    AVAILABLE_CONTAINERS,
-    BaseE2ETest,
-    fake,
-)
-from tests.e2e.utils import (
-    tmp_context,
-    with_check_if_file_appears,
-)
-
-
-@pytest.mark.xfail(
-    reason="Artifact methods disabled",
-    strict=True,
-    raises=(
-        NeptuneUnsupportedFunctionalityException,
-        ModuleNotFoundError,
-    ),
-)
-class TestArtifacts(BaseE2ETest):
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_local_creation(self, container: NeptuneObject):
-        first, second = self.gen_key(), self.gen_key()
-        filename = fake.unique.file_name()
-
-        with tmp_context() as tmp:
-            with open(filename, "w", encoding="utf-8") as handler:
-                handler.write(fake.paragraph(nb_sentences=5))
-
-            container[first].track_files(".")
-            container[second].track_files(f"file://{tmp}")
-
-            container.sync()
-
-        assert container[first].fetch_hash() == container[second].fetch_hash()
-        assert container[first].fetch_files_list() == container[second].fetch_files_list()
-
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_assignment(self, container: NeptuneObject):
-        first, second = self.gen_key(), self.gen_key()
-        filename = fake.unique.file_name()
-
-        with tmp_context():
-            with open(filename, "w", encoding="utf-8") as handler:
-                handler.write(fake.paragraph(nb_sentences=5))
-
-            container[first].track_files(filename)
-            container.wait()
-            container[second] = container[first].fetch()
-            container.sync()
-
-        assert container[first].fetch_hash() == container[second].fetch_hash()
-        assert container[first].fetch_files_list() == container[second].fetch_files_list()
-
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_local_download(self, container: NeptuneObject):
-        first, second = self.gen_key(), self.gen_key()
-        filename,
filepath = fake.unique.file_name(), fake.unique.file_path(depth=3, absolute=False) - - with tmp_context() as tmp: - with open(filename, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - os.makedirs(Path(filepath).parent, exist_ok=True) - with open(filepath, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - # Relative path - container[first].track_files(filename) - # Absolute path - container[second].track_files(f"file://{tmp}") - - container.sync() - - with tmp_context(): - with with_check_if_file_appears(Path(f"artifacts/{filename}")): - container[first].download("artifacts/") - - with with_check_if_file_appears(Path(filepath)): - container[second].download() - - @pytest.mark.s3 - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_s3_creation(self, container: NeptuneObject, bucket, environment): - first, second, prefix = ( - self.gen_key(), - self.gen_key(), - f"{environment.project}/{self.gen_key()}/{type(container).__name__}", - ) - filename = fake.unique.file_name() - - bucket_name, s3_client = bucket - - with tmp_context(): - with open(filename, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - s3_client.meta.client.upload_file(filename, bucket_name, f"{prefix}/{filename}") - - container[first].track_files(f"s3://{bucket_name}/{prefix}/{filename}") - container[second].track_files(f"s3://{bucket_name}/{prefix}") - - container.sync() - - assert container[first].fetch_hash() == container[second].fetch_hash() - assert container[first].fetch_files_list() == container[second].fetch_files_list() - - @pytest.mark.s3 - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_s3_download(self, container: NeptuneObject, bucket, environment): - first = self.gen_key() - prefix = f"{environment.project}/{self.gen_key()}/{type(container).__name__}" - filename, filepath = fake.unique.file_name(), fake.unique.file_path(depth=3, absolute=False) - - bucket_name, s3_client = bucket - - with tmp_context(): - with open(filename, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - os.makedirs(Path(filepath).parent, exist_ok=True) - with open(filepath, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - s3_client.meta.client.upload_file(filename, bucket_name, f"{prefix}/{filename}") - s3_client.meta.client.upload_file(filepath, bucket_name, f"{prefix}/{filepath}") - - container[first].track_files(f"s3://{bucket_name}/{prefix}") - - container.sync() - - with tempfile.TemporaryDirectory() as tmp: - with with_check_if_file_appears(f"{tmp}/{filename}"): - container[first].download(tmp) - - with tmp_context(): - with with_check_if_file_appears(filename): - container[first].download() - - @pytest.mark.s3 - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_s3_existing(self, container: NeptuneObject, bucket, environment): - first, second, prefix = ( - self.gen_key(), - self.gen_key(), - f"{environment.project}/{self.gen_key()}/{type(container).__name__}", - ) - filename, filepath = fake.file_name(), fake.file_path(depth=3, absolute=False) - - bucket_name, s3_client = bucket - - with tmp_context(): - with open(filename, "w", encoding="utf-8") as handler: - handler.write(fake.paragraph(nb_sentences=5)) - - os.makedirs(Path(filepath).parent, exist_ok=True) - with open(filepath, "w", encoding="utf-8") as handler: - 
handler.write(fake.paragraph(nb_sentences=5))
-
-            s3_client.meta.client.upload_file(filename, bucket_name, f"{prefix}/{filename}")
-            s3_client.meta.client.upload_file(filepath, bucket_name, f"{prefix}/{filepath}")
-
-            # Track all files - "a" and "b" to first artifact
-            container[first].track_files(f"s3://{bucket_name}/{prefix}/")
-
-            # Track only the "a" file to second artifact
-            container[second].track_files(f"s3://{bucket_name}/{prefix}/{filename}")
-            container.sync()
-
-            # Add "b" file to existing second artifact
-            # so it should now be identical to the first
-            container[second].track_files(
-                f"s3://{bucket_name}/{prefix}/{filepath}",
-                destination=str(PurePosixPath(filepath).parent),
-            )
-            container.sync()
-
-        assert container[first].fetch_hash() == container[second].fetch_hash()
-        assert container[first].fetch_files_list() == container[second].fetch_files_list()
-
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_local_existing(self, container: NeptuneObject):
-        first, second = self.gen_key(), self.gen_key()
-        filename, filepath = fake.file_name(), fake.file_path(depth=3, absolute=False)
-
-        with tmp_context() as tmp:
-            with open(filename, "w", encoding="utf-8") as handler:
-                handler.write(fake.paragraph(nb_sentences=5))
-
-            os.makedirs(Path(filepath).parent, exist_ok=True)
-            with open(filepath, "w", encoding="utf-8") as handler:
-                handler.write(fake.paragraph(nb_sentences=5))
-
-            # Track all files - "a" and "b" to first artifact
-            container[first].track_files(".")
-
-            # Track only the "a" file to second artifact
-            container[second].track_files(f"file://{Path(tmp)}/{filename}")
-            container.sync()
-
-            # Add "b" file to existing second artifact
-            # so it should now be identical to the first
-            container[second].track_files(filepath, destination=str(Path(filepath).parent))
-            container.sync()
-
-        assert container[first].fetch_hash() == container[second].fetch_hash()
-        assert container[first].fetch_files_list() == container[second].fetch_files_list()
diff --git a/tests/e2e/standard/test_base.py b/tests/e2e/standard/test_base.py
deleted file mode 100644
index 97e14839e..000000000
--- a/tests/e2e/standard/test_base.py
+++ /dev/null
@@ -1,177 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import random -from datetime import ( - datetime, - timezone, -) - -import pytest - -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.objects import NeptuneObject -from tests.e2e.base import ( - AVAILABLE_CONTAINERS, - BaseE2ETest, - fake, -) - - -class TestAtoms(BaseE2ETest): - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - @pytest.mark.parametrize("value", [random.randint(0, 100), random.random(), fake.boolean(), fake.word()]) - def test_simple_assign_and_fetch(self, container: NeptuneObject, value): - key = self.gen_key() - - container[key] = value - container.sync() - assert container[key].fetch() == value - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_simple_assign_datetime(self, container: NeptuneObject): - key = self.gen_key() - now = datetime.now() - - container[key] = now - container.sync() - - # expect truncate to milliseconds and add UTC timezone - expected_now = now.astimezone(timezone.utc).replace(microsecond=int(now.microsecond / 1000) * 1000) - assert container[key].fetch() == expected_now - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_fetch_non_existing_key(self, container: NeptuneObject): - key = self.gen_key() - with pytest.raises(AttributeError): - container[key].fetch() - - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_delete_atom(self, container: NeptuneObject): - key = self.gen_key() - value = fake.name() - - container[key] = value - container.sync() - - assert container[key].fetch() == value - - del container[key] - with pytest.raises(AttributeError): - container[key].fetch() - - -class TestNamespace(BaseE2ETest): - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_reassigning(self, container: NeptuneObject): - namespace = self.gen_key() - key = f"{fake.unique.word()}/{fake.unique.word()}" - value = fake.name() - - # Assign a namespace - container[namespace] = {f"{key}": value} - container.sync() - - assert container[f"{namespace}/{key}"].fetch() == value - - # Direct reassign internal value - value = fake.name() - container[f"{namespace}/{key}"] = value - container.sync() - - assert container[f"{namespace}/{key}"].fetch() == value - - # Reassigning by namespace - value = fake.name() - container[namespace] = {f"{key}": value} - container.sync() - - assert container[f"{namespace}/{key}"].fetch() == value - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_distinct_types(self, container: NeptuneObject): - namespace = self.gen_key() - key = f"{fake.unique.word()}/{fake.unique.word()}" - value = random.randint(0, 100) - - container[namespace] = {f"{key}": value} - container.sync() - - assert container[f"{namespace}/{key}"].fetch() == value - - new_value = fake.name() - - with pytest.raises(ValueError): - container[namespace] = {f"{key}": new_value} - container.sync() - - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_delete_namespace(self, container: NeptuneObject): - namespace = fake.unique.word() - key1 = fake.unique.word() - key2 = fake.unique.word() - value1 = fake.name() - value2 = fake.name() - - container[namespace][key1] = value1 
-        container[namespace][key2] = value2
-        container.sync()
-
-        assert container[namespace][key1].fetch() == value1
-        assert container[namespace][key2].fetch() == value2
-
-        del container[namespace]
-        with pytest.raises(AttributeError):
-            container[namespace][key1].fetch()
-        with pytest.raises(AttributeError):
-            container[namespace][key2].fetch()
-
-
-class TestStringSet(BaseE2ETest):
-    neptune_tags_path = "sys/tags"
-
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_do_not_accept_non_tag_path(self, container: NeptuneObject):
-        random_path = "some/path"
-        container[random_path].add(fake.unique.word())
-        container.sync()
-
-        with pytest.raises(AttributeError):
-            # the backend accepts `'sys/tags'` only
-            container[random_path].fetch()
-
-    @pytest.mark.xfail(reason="StringSet remove disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True)
-    def test_add_and_remove_tags(self, container: NeptuneObject):
-        remaining_tag1 = fake.unique.word()
-        remaining_tag2 = fake.unique.word()
-        to_remove_tag1 = fake.unique.word()
-        to_remove_tag2 = fake.unique.word()
-
-        container.sync()
-        if container.exists(self.neptune_tags_path):
-            container[self.neptune_tags_path].clear()
-        container[self.neptune_tags_path].add(remaining_tag1)
-        container[self.neptune_tags_path].add([to_remove_tag1, remaining_tag2])
-        container[self.neptune_tags_path].remove(to_remove_tag1)
-        container[self.neptune_tags_path].remove(to_remove_tag2)  # remove a non-existent tag
-        container.sync()
-
-        assert container[self.neptune_tags_path].fetch() == {
-            remaining_tag1,
-            remaining_tag2,
-        }
diff --git a/tests/e2e/standard/test_cli.py b/tests/e2e/standard/test_cli.py
deleted file mode 100644
index 0fcfe2a72..000000000
--- a/tests/e2e/standard/test_cli.py
+++ /dev/null
@@ -1,276 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import json -import os -import re -from pathlib import Path - -import pytest -from click.testing import CliRunner - -import neptune -from neptune.cli import sync -from neptune.cli.commands import clear -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.exceptions import NeptuneException -from neptune.internal.utils.utils import IS_WINDOWS -from neptune.types import File -from tests.e2e.base import ( - AVAILABLE_CONTAINERS, - BaseE2ETest, - fake, -) -from tests.e2e.utils import ( - DISABLE_SYSLOG_KWARGS, - initialize_container, - reinitialize_container, - tmp_context, -) - -runner = CliRunner() - - -@pytest.mark.xfail(reason="cli commands are disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) -class TestCli(BaseE2ETest): - SYNCHRONIZED_SYSID_RE = r"[\w-]+/[\w-]+/([\w-]+)" - - @pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) - def test_sync_container(self, container_type, environment): - with tmp_context() as tmp: - key = self.gen_key() - original_value = fake.unique.word() - updated_value = fake.unique.word() - - with initialize_container(container_type=container_type, project=environment.project) as container: - # assign original value - container[key] = original_value - container.wait() - container_id = container._id - container_sys_id = container._sys_id - - self.stop_synchronization_process(container) - # add random property - the queue will not be empty on close - container[self.gen_key()] = fake.unique.word() - - # manually add operations to queue - queue_dir = list(Path("./.neptune/async/").glob(f"{container_type}__{container_id}__*"))[0] - with open(queue_dir / "last_put_version", encoding="utf-8") as last_put_version_f: - last_put_version = int(last_put_version_f.read()) - with open(queue_dir / "data-1.log", "a", encoding="utf-8") as queue_f: - queue_f.write( - json.dumps( - { - "obj": { - "type": "AssignString", - "path": key.split("/"), - "value": updated_value, - }, - "version": last_put_version + 1, - } - ) - ) - queue_f.write( - json.dumps( - { - "obj": { - "type": "CopyAttribute", - "path": ["copy"] + key.split("/"), - "container_id": container_id, - "container_type": container_type, - "source_path": key.split("/"), - "source_attr_name": "String", - }, - "version": last_put_version + 2, - } - ) - ) - with open(queue_dir / "last_put_version", "w", encoding="utf-8") as last_put_version_f: - last_put_version_f.write(str(last_put_version + 2)) - - with reinitialize_container( - container_sys_id, container_type, project=environment.project, mode="read-only" - ) as container: - # server should have the original value - assert container[key].fetch() == original_value - - # run neptune sync - result = runner.invoke(sync, ["--path", tmp], catch_exceptions=False) - assert result.exit_code == 0 - - with reinitialize_container(container_sys_id, container_type, project=environment.project) as container: - # and we should get the updated value from server - assert container[key].fetch() == updated_value - assert container["copy/" + key].fetch() == updated_value - - @staticmethod - def stop_synchronization_process(container): - container._op_processor._consumer.interrupt() - - @pytest.mark.skipif(IS_WINDOWS, reason="Disabled functionality raise exception that breaks state of file system") - def test_offline_sync(self, environment): - with tmp_context() as tmp: - # create run in offline mode - run = neptune.init_run( - mode="offline", - project=environment.project, - **DISABLE_SYSLOG_KWARGS, - ) - # assign 
some values - key = self.gen_key() - val = fake.word() - run[key] = val - - # and some file - key2 = self.gen_key() - val2 = File.from_content(b"dummybytes") - run[key2].upload(val2) - - # and stop it - run.stop() - - # run asynchronously - result = runner.invoke(sync, ["--path", tmp, "-p", environment.project], catch_exceptions=False) - assert result.exit_code == 0 - - # we'll have to parse sync output to determine short_id - sys_id_found = re.search(self.SYNCHRONIZED_SYSID_RE, result.stdout) - assert len(sys_id_found.groups()) == 1 - sys_id = sys_id_found.group(1) - - run2 = neptune.init_run(with_id=sys_id, project=environment.project) - assert run2[key].fetch() == val - run2[key2].download() - with open(f"{tmp}/{key2.split('/')[-1]}.bin", "rb") as file: - assert file.read() == b"dummybytes" - run2.stop() - - @pytest.mark.parametrize("container_type", ["run"]) - def test_clear_command_offline_and_online_containers(self, environment, container_type): - with tmp_context() as tmp: - key = self.gen_key() - - with initialize_container(container_type=container_type, project=environment.project) as container: - self.stop_synchronization_process(container) - - container[key] = fake.unique.word() - container_path = container._op_processor.data_path - container_sys_id = container._sys_id - - with initialize_container( - container_type=container_type, project=environment.project, mode="offline" - ) as container: - container[key] = fake.unique.word() - offline_container_path = container._op_processor.data_path - offline_container_id = container._id - - assert os.path.exists(container_path) - assert os.path.exists(offline_container_path) - - result = runner.invoke(clear, args=["--path", tmp], input="y", catch_exceptions=False) - - assert result.exit_code == 0 - - assert not os.path.exists(container_path) - assert not os.path.exists(offline_container_path) - assert result.output.splitlines() == [ - "", - "Unsynchronized objects:", - f"- {environment.project}/{container_sys_id}", - "", - "Unsynchronized offline objects:", - f"- offline/{offline_container_id}", - "", - "Do you want to delete the listed metadata? [y/N]: y", - f"Deleted: {offline_container_path.resolve()}", - f"Deleted: {container_path.resolve()}", - ] - - @pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) - def test_clear_command_online_containers(self, environment, container_type): - with tmp_context() as tmp: - key = self.gen_key() - - with initialize_container(container_type=container_type, project=environment.project) as container: - self.stop_synchronization_process(container) - - container[key] = fake.unique.word() - container_path = container._op_processor.data_path - container_sys_id = container._sys_id - - assert os.path.exists(container_path) - - result = runner.invoke(clear, args=["--path", tmp], input="y", catch_exceptions=False) - assert result.exit_code == 0 - - assert not os.path.exists(container_path) - assert result.output.splitlines() == [ - "", - "Unsynchronized objects:", - f"- {environment.project}/{container_sys_id}", - "", - "Do you want to delete the listed metadata? 
[y/N]: y", - f"Deleted: {container_path.resolve()}", - ] - - @pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) - def test_sync_should_delete_directories(self, environment, container_type): - with tmp_context() as tmp: - key = self.gen_key() - - with initialize_container(container_type=container_type, project=environment.project) as container: - self.stop_synchronization_process(container) - - container[key] = fake.unique.word() - container_path = container._op_processor.data_path - - assert os.path.exists(container_path) - - result = runner.invoke(sync, args=["--path", tmp], catch_exceptions=False) - assert result.exit_code == 0 - - assert not os.path.exists(container_path) - - @pytest.mark.parametrize( - "container_type", - [ - pytest.param( - "model", - marks=pytest.mark.skip( - ( - "By coincidence, the test is passing as " - "NeptuneUnsupportedFunctionalityException is subclass of NeptuneException" - ) - ), - ), - pytest.param( - "model_version", - marks=pytest.mark.skip( - ( - "By coincidence, the test is passing as " - "NeptuneUnsupportedFunctionalityException is subclass of NeptuneException" - ) - ), - ), - pytest.param("project", marks=pytest.mark.skip(reason="Project not supported")), - ], - ) - def test_cannot_offline_non_runs(self, environment, container_type): - with pytest.raises(NeptuneException, match=r"Project can't be initialized in OFFLINE mode"): - initialize_container( - container_type=container_type, - project=environment.project, - mode="offline", - ) diff --git a/tests/e2e/standard/test_copy.py b/tests/e2e/standard/test_copy.py deleted file mode 100644 index c403b3d7d..000000000 --- a/tests/e2e/standard/test_copy.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import itertools -import random - -import pytest - -from neptune.objects import NeptuneObject -from tests.e2e.base import ( - AVAILABLE_CONTAINERS, - BaseE2ETest, - fake, -) - -# List of every possible container type pair for instance: "run-run, run-model, model-model_version, ..." 
- -ALL_CONTAINERS_PAIRS = list( - map( - lambda p: pytest.param( - p[0].values + p[1].values, marks=p[0].marks + p[1].marks if p[0].marks != p[1].marks else p[0].marks - ), - itertools.product(AVAILABLE_CONTAINERS, AVAILABLE_CONTAINERS), - ) -) - - -class TestCopying(BaseE2ETest): - @pytest.mark.parametrize("containers_pair", ALL_CONTAINERS_PAIRS, indirect=True) - @pytest.mark.parametrize("value", [random.randint(0, 100), random.random(), fake.boolean(), fake.word()]) - def test_copy(self, containers_pair: (NeptuneObject, NeptuneObject), value): - container_a, container_b = containers_pair - - src, destination, destination2 = self.gen_key(), self.gen_key(), self.gen_key() - - container_a[src] = value - container_a.sync() - - container_b[destination] = container_a[src] - - # TODO: This is a workaround for partitioned async - container_b.wait() - - container_b[destination2] = container_b[destination] - container_b.sync() - - assert container_a[src].fetch() == value - assert container_b[destination].fetch() == value - assert container_b[destination2].fetch() == value diff --git a/tests/e2e/standard/test_fetch_tables.py b/tests/e2e/standard/test_fetch_tables.py deleted file mode 100644 index ebd6b44fd..000000000 --- a/tests/e2e/standard/test_fetch_tables.py +++ /dev/null @@ -1,449 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import datetime -import random -import time -import uuid - -import pytest - -import neptune -from neptune.exceptions import NeptuneInvalidQueryException -from neptune.internal.utils.utils import IS_MACOS -from neptune.objects import Model -from tests.e2e.base import ( - BaseE2ETest, - fake, -) -from tests.e2e.utils import a_key - -WAIT_DURATION = 60 - - -class TestFetchTable(BaseE2ETest): - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_by_tag(self, environment, project, with_query): - tag1, tag2 = str(uuid.uuid4()), str(uuid.uuid4()) - - with neptune.init_run(project=environment.project) as run: - run_id1 = run["sys/id"].fetch() - run["sys/tags"].add(tag1) - run["sys/tags"].add(tag2) - - with neptune.init_run(project=environment.project) as run: - run["sys/tags"].add(tag2) - - # wait for the cache to fill - time.sleep(WAIT_DURATION) - - if with_query: - kwargs = {"query": f"(sys/tags: stringSet CONTAINS '{tag1}')"} - else: - kwargs = {"tag": [tag1, tag2]} - - runs = project.fetch_runs_table(progress_bar=False, **kwargs).to_rows() - - assert len(runs) == 1 - assert runs[0].get_attribute_value("sys/id") == run_id1 - - @pytest.mark.skip(reason="Model is not supported") - @pytest.mark.parametrize("with_query", [True, False]) - @pytest.mark.parametrize("container_fn_scope", ["model"], indirect=True) - def test_fetch_model_versions_with_correct_ids(self, container_fn_scope: Model, environment, with_query: bool): - model_sys_id = container_fn_scope["sys/id"].fetch() - versions_to_initialize = 5 - - for _ in range(versions_to_initialize): - with neptune.init_model_version(model=model_sys_id, project=environment.project): - pass - - # wait for the elasticsearch cache to fill - time.sleep(WAIT_DURATION) - - query = "" if with_query else None - versions_table = sorted( - container_fn_scope.fetch_model_versions_table(query=query, progress_bar=False).to_rows(), - key=lambda r: r.get_attribute_value("sys/id"), - ) - assert len(versions_table) == versions_to_initialize - for index in range(versions_to_initialize): - assert versions_table[index].get_attribute_value("sys/id") == f"{model_sys_id}-{index + 1}" - - versions_table_gen = container_fn_scope.fetch_model_versions_table( - query=query, ascending=True, progress_bar=False - ) - for te1, te2 in zip(list(versions_table_gen), versions_table): - assert te1._id == te2._id - assert te1._container_type == te2._container_type - - def _test_fetch_from_container(self, init_container, get_containers_as_rows): - container_id1, container_id2 = None, None - key1 = self.gen_key() - key2 = f"{self.gen_key()}/{self.gen_key()}" - value1 = random.randint(1, 100) - value2 = fake.name() - - with init_container() as container: - container_id1 = container["sys/id"].fetch() - container[key1] = value1 - container[key2] = value2 - container.sync() - - with init_container() as container: - container_id2 = container["sys/id"].fetch() - container[key1] = value1 - container.sync() - - # wait for the cache to fill - time.sleep(WAIT_DURATION) - - containers_as_rows = get_containers_as_rows() - container1 = next(filter(lambda m: m.get_attribute_value("sys/id") == container_id1, containers_as_rows)) - container2 = next(filter(lambda m: m.get_attribute_value("sys/id") == container_id2, containers_as_rows)) - - assert container1.get_attribute_value(key1) == value1 - assert container1.get_attribute_value(key2) == value2 - assert container2.get_attribute_value(key1) == value1 - with pytest.raises(ValueError): - container2.get_attribute_value(key2) - - 
def get_container1(**kwargs): - containers_as_rows = get_containers_as_rows(**kwargs) - return next(filter(lambda m: m.get_attribute_value("sys/id") == container_id1, containers_as_rows)) - - non_filtered = get_container1() - assert non_filtered.get_attribute_value(key1) == value1 - assert non_filtered.get_attribute_value(key2) == value2 - - columns_none = get_container1(columns=None) - assert columns_none.get_attribute_value(key1) == value1 - assert columns_none.get_attribute_value(key2) == value2 - - columns_empty = get_container1(columns=[]) - with pytest.raises(ValueError): - columns_empty.get_attribute_value(key1) - with pytest.raises(ValueError): - columns_empty.get_attribute_value(key2) - - columns_with_one_key = get_container1(columns=[key1]) - assert columns_with_one_key.get_attribute_value(key1) == value1 - with pytest.raises(ValueError): - columns_with_one_key.get_attribute_value(key2) - - columns_with_one_key = get_container1(columns=[key2]) - with pytest.raises(ValueError): - columns_with_one_key.get_attribute_value(key1) - assert columns_with_one_key.get_attribute_value(key2) == value2 - - def test_fetch_runs_table(self, environment, project): - def init_run(): - return neptune.init_run(project=environment.project) - - def get_runs_as_rows(**kwargs): - return project.fetch_runs_table(**kwargs, progress_bar=False).to_rows() - - self._test_fetch_from_container(init_run, get_runs_as_rows) - - @pytest.mark.skip(reason="Model is not supported") - def test_fetch_models_table(self, environment, project): - def init_run(): - return neptune.init_model(project=environment.project, key=a_key()) - - def get_models_as_rows(**kwargs): - return project.fetch_models_table(**kwargs, progress_bar=False).to_rows() - - self._test_fetch_from_container(init_run, get_models_as_rows) - - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model", - marks=pytest.mark.skip(reason="Model is not supported"), - ) - ], - indirect=True, - ) - def test_fetch_model_versions_table(self, container: Model, environment): - model_sys_id = container["sys/id"].fetch() - - def init_run(): - return neptune.init_model_version(model=model_sys_id, project=environment.project) - - def get_model_versions_as_rows(**kwargs): - return container.fetch_model_versions_table(**kwargs, progress_bar=False).to_rows() - - self._test_fetch_from_container(init_run, get_model_versions_as_rows) - - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model", - marks=pytest.mark.skip(reason="Model not implemented"), - ) - ], - indirect=True, - ) - def test_fetch_model_versions_table_by_query(self, container, environment): - model_sys_id = container["sys/id"].fetch() - key = "some_key" - vals = ["some_val_1", "some_val_2"] - names = ["name_1", "name_2"] - - for name, val in zip(names, vals): - with neptune.init_model_version(model=model_sys_id, name=name, project=environment.project) as mv: - mv[key] = val - - time.sleep(WAIT_DURATION) - - for val, expected_names in zip(vals + ["non_existent_val"], [[names[0]], [names[1]], []]): - model_versions = container.fetch_model_versions_table( - query=f"({key}: string = '{val}')", - sort_by="name", - ).to_rows() - assert sorted([mv.get_attribute_value("sys/name") for mv in model_versions]) == sorted(expected_names) - - @pytest.mark.skip("Tags are temporarily disabled - will be brought back in 2.0.0") - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_table_by_state(self, environment, project, with_query): - tag = str(uuid.uuid4()) - random_val = 
random.random() - - with neptune.init_run(project=environment.project, tags=tag) as run: - run["some_random_val"] = random_val - - time.sleep(WAIT_DURATION) - - if with_query: - kwargs = {"query": "(sys/state: experimentState = idle)"} - else: - kwargs = {"state": "inactive"} - - runs = project.fetch_runs_table(**kwargs, progress_bar=False).to_pandas() - - assert not runs.empty - assert tag in runs["sys/tags"].values - assert random_val in runs["some_random_val"].values - - @pytest.mark.parametrize("ascending", [True, False]) - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_table_sorting(self, environment, project, ascending, with_query): - # given - with neptune.init_run(project=environment.project, custom_run_id="run1") as run: - run["metrics/accuracy"] = 0.95 - run["some_val"] = "b" - - with neptune.init_run(project=environment.project, custom_run_id="run2") as run: - run["metrics/accuracy"] = 0.90 - run["some_val"] = "a" - - time.sleep(WAIT_DURATION) - query = "" if with_query else None - - # when - runs = project.fetch_runs_table( - query=query, sort_by="sys/creation_time", ascending=ascending, progress_bar=False - ).to_pandas() - - # then - # runs are correctly sorted by creation time -> run1 was first - assert not runs.empty - run_list = runs["sys/custom_run_id"].dropna().to_list() - if ascending: - assert run_list == ["run1", "run2"] - else: - assert run_list == ["run2", "run1"] - - # when - runs = project.fetch_runs_table( - query=query, sort_by="metrics/accuracy", ascending=ascending, progress_bar=False - ).to_pandas() - - # then - assert not runs.empty - run_list = runs["sys/custom_run_id"].dropna().to_list() - - if ascending: - assert run_list == ["run2", "run1"] - else: - assert run_list == ["run1", "run2"] - - # when - runs = project.fetch_runs_table( - query=query, sort_by="some_val", ascending=ascending, progress_bar=False - ).to_pandas() - - # then - assert not runs.empty - run_list = runs["sys/custom_run_id"].dropna().to_list() - - if ascending: - assert run_list == ["run2", "run1"] - else: - assert run_list == ["run1", "run2"] - - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_table_non_atomic_type(self, environment, project, with_query): - # test if now it fails when we add a non-atomic type to that field - - query = "" if with_query else None - # given - with neptune.init_run(project=environment.project, custom_run_id="run3") as run: - run["metrics/accuracy"] = 0.9 - - with neptune.init_run(project=environment.project, custom_run_id="run4") as run: - for i in range(5): - run["metrics/accuracy"].log(0.95) - - time.sleep(WAIT_DURATION) - - # then - with pytest.raises(ValueError): - project.fetch_runs_table(query=query, sort_by="metrics/accuracy", progress_bar=False) - - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_table_datetime_parsed(self, environment, project, with_query): - # given - with neptune.init_run(project=environment.project) as run: - run["some_timestamp"] = datetime.datetime.now() - - time.sleep(WAIT_DURATION) - - # when - query = "" if with_query else None - runs = project.fetch_runs_table( - query=query, columns=["sys/creation_time", "some_timestamp"], progress_bar=False - ).to_pandas() - - # then - assert isinstance(runs["sys/creation_time"].iloc[0], datetime.datetime) - assert isinstance(runs["some_timestamp"].iloc[0], datetime.datetime) - - @pytest.mark.parametrize("with_query", [True, False]) - def test_fetch_runs_table_limit(self, environment, project, 
with_query): - # given - with neptune.init_run(project=environment.project) as run: - run["some_val"] = "a" - - with neptune.init_run(project=environment.project) as run: - run["some_val"] = "b" - - time.sleep(WAIT_DURATION) - - # when - query = "" if with_query else None - runs = project.fetch_runs_table(query=query, limit=1, progress_bar=False).to_pandas() - - # then - assert len(runs) == 1 - - @pytest.mark.skipif(IS_MACOS, reason="MacOS behaves strangely on github actions") - def test_fetch_runs_table_raw_query_trashed(self, environment, project): - # given - val: float = 2.2 - with neptune.init_run(project=environment.project, custom_run_id="run1") as run: - run["key"] = val - - with neptune.init_run(project=environment.project, custom_run_id="run2") as run: - run["key"] = val - - time.sleep(WAIT_DURATION) - - # when - runs = project.fetch_runs_table(query=f"(key: float = {val})", progress_bar=False, trashed=False).to_pandas() - - # then - run_list = runs["sys/custom_run_id"].dropna().to_list() - assert ["run1", "run2"] == sorted(run_list) - - # when - neptune.management.trash_objects( - project=environment.project, ids=runs[runs["sys/custom_run_id"] == "run2"]["sys/id"].item() - ) - - time.sleep(WAIT_DURATION) - - runs = project.fetch_runs_table(query=f"(key: float = {val})", progress_bar=False, trashed=True).to_pandas() - - # then - run_list = runs["sys/custom_run_id"].dropna().to_list() - assert ["run2"] == run_list - - # when - runs = project.fetch_runs_table(query=f"(key: float = {val})", progress_bar=False, trashed=None).to_pandas() - - # then - run_list = runs["sys/custom_run_id"].dropna().to_list() - assert ["run1", "run2"] == sorted(run_list) - - def test_fetch_runs_invalid_query_handling(self, project): - # given - runs_table = project.fetch_runs_table(query="key: float = (-_-)", progress_bar=False) - - # then - with pytest.raises(NeptuneInvalidQueryException): - next(iter(runs_table)) - - @pytest.mark.skip(reason="Model is not supported") - @pytest.mark.skipif(IS_MACOS, reason="MacOS behaves strangely on github actions") - def test_fetch_models_raw_query_trashed(self, environment, project): - # given - val: float = 2.2 - with neptune.init_model(project=environment.project, key=a_key(), name="name-1") as model: - model["key"] = val - - with neptune.init_model(project=environment.project, key=a_key(), name="name-2") as model: - model["key"] = val - - time.sleep(WAIT_DURATION) - - # when - models = project.fetch_models_table( - query=f"(key: float = {val})", progress_bar=False, trashed=False - ).to_pandas() - - # then - model_list = models["sys/name"].dropna().to_list() - assert sorted(model_list) == sorted(["name-1", "name-2"]) - - # when - neptune.management.trash_objects( - project=environment.project, ids=models[models["sys/name"] == "name-1"]["sys/id"].item() - ) - - time.sleep(WAIT_DURATION) - - trashed_vals = [True, False, None] - expected_model_names = [["name-1"], ["name-2"], ["name-1", "name-2"]] - - for trashed, model_names in zip(trashed_vals, expected_model_names): - # when - models = project.fetch_models_table( - query=f"(key: float = {val})", progress_bar=False, trashed=trashed - ).to_pandas() - - # then - model_list = models["sys/name"].dropna().to_list() - assert sorted(model_list) == sorted(model_names) - - def test_fetch_models_invalid_query_handling(self, project): - # given - runs_table = project.fetch_models_table(query="key: float = (-_-)", progress_bar=False) - - # then - with pytest.raises(NeptuneInvalidQueryException): - next(iter(runs_table)) diff 
--git a/tests/e2e/standard/test_files.py b/tests/e2e/standard/test_files.py
deleted file mode 100644
index 5c6677697..000000000
--- a/tests/e2e/standard/test_files.py
+++ /dev/null
@@ -1,559 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import io
-import os
-import random
-import uuid
-from itertools import product
-from typing import Set
-from unittest.mock import Mock
-from zipfile import ZipFile
-
-import pytest
-
-from neptune.exceptions import NeptuneUnsupportedFunctionalityException
-from neptune.internal.backends import hosted_file_operations
-from neptune.internal.backends.api_model import (
-    MultipartConfig,
-    OptionalFeatures,
-)
-from neptune.internal.backends.hosted_neptune_backend import HostedNeptuneBackend
-from neptune.internal.types.file_types import FileType
-from neptune.objects import NeptuneObject
-from neptune.types import (
-    File,
-    FileSet,
-)
-from tests.e2e.base import (
-    AVAILABLE_CONTAINERS,
-    BaseE2ETest,
-    available_containers_parameters,
-    fake,
-)
-from tests.e2e.plot_utils import (
-    generate_altair_chart,
-    generate_bokeh_figure,
-    generate_matplotlib_figure,
-    generate_pil_image,
-    generate_plotly_figure,
-    generate_seaborn_figure,
-)
-from tests.e2e.utils import (
-    SIZE_1KB,
-    SIZE_1MB,
-    initialize_container,
-    preserve_cwd,
-    tmp_context,
-)
-
-
-@pytest.mark.xfail(reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-class TestUpload(BaseE2ETest):
-    @pytest.mark.parametrize(
-        "container",
-        available_containers_parameters().skip("run", reason="Skipped as whole class is expected to fail").eval(),
-        indirect=True,
-    )
-    def test_using_new_api(self, container: NeptuneObject):
-        assert isinstance(container._backend, HostedNeptuneBackend)
-        assert container._backend._client_config.has_feature(OptionalFeatures.MULTIPART_UPLOAD)
-        assert isinstance(container._backend._client_config.multipart_config, MultipartConfig)
-
-    def _test_upload(self, container: NeptuneObject, file_type: FileType, file_size: int):
-        key = self.gen_key()
-        extension = fake.file_extension()
-        downloaded_filename = fake.file_name()
-        content = os.urandom(file_size)
-
-        if file_type is FileType.LOCAL_FILE:
-            filename = fake.file_name(extension=extension)
-            with open(filename, "wb") as file:
-                file.write(content)
-
-            file = File.from_path(filename)
-        elif file_type is FileType.IN_MEMORY:
-            file = File.from_content(content, extension=extension)
-        elif file_type is FileType.STREAM:
-            file = File.from_stream(io.BytesIO(content), extension=extension)
-        else:
-            raise ValueError()
-
-        container[key].upload(file)
-        container.sync()
-        container[key].download(downloaded_filename)
-
-        assert container[key].fetch_extension() == extension
-        assert os.path.getsize(downloaded_filename) == file_size
-        with open(downloaded_filename, "rb") as file:
-            downloaded_content = file.read()
-            assert len(content) == file_size
-            assert downloaded_content == content
-
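For reference, _test_upload above dispatches on the three FileType variants via the corresponding File constructors. A minimal sketch of each, with placeholder names that are not from the deleted file (the local file is assumed to already exist on disk):

    import io

    from neptune.types import File

    payload = b"\x00" * 1024

    # FileType.LOCAL_FILE: wrap an existing file on disk (assumed present here).
    local_file = File.from_path("example.bin")

    # FileType.IN_MEMORY: wrap raw bytes (or text) held in memory.
    in_memory_file = File.from_content(payload, extension="bin")

    # FileType.STREAM: wrap any readable stream.
    stream_file = File.from_stream(io.BytesIO(payload), extension="bin")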
@pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - @pytest.mark.parametrize("file_type", list(FileType)) - def test_single_upload(self, container: NeptuneObject, file_type: FileType): - file_size = 100 * SIZE_1KB # 100 kB, single upload - self._test_upload(container, file_type, file_size) - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_multipart_upload(self, container: NeptuneObject): - file_size = 10 * SIZE_1MB # 10 MB, multipart - self._test_upload(container, FileType.IN_MEMORY, file_size) - - def test_file_changed_during_upload(self, environment, monkeypatch): - key = self.gen_key() - file_size = 11 * SIZE_1MB # 11 MB, multipart with 3 parts - intermediate_size = 6 * SIZE_1MB # 6 MB, second part < 5MB - filename = fake.file_name() - downloaded_filename = fake.file_name() - - _upload_raw_data = hosted_file_operations.upload_raw_data - - run = initialize_container( - container_type="run", - project=environment.project, - mode="sync", - ) - - with tmp_context(): - # create file_size file - with open(filename, "wb") as file: - file.write(b"\0" * file_size) - - class UploadedFileChanger: - def __init__(self): - self.upload_part_iteration = 0 - - def __call__(self, *args, **kwargs): - # file starts to change and after uploading first part it's at intermediate_size - if self.upload_part_iteration == 0: - with open(filename, "wb") as file: - file.write(b"\0" * intermediate_size) - # after that it's back at file_size - elif self.upload_part_iteration == 1: - with open(filename, "wb") as file: - file.write(b"\0" * file_size) - self.upload_part_iteration += 1 - - return _upload_raw_data(*args, **kwargs) - - hacked_upload_raw_data = UploadedFileChanger() - monkeypatch.setattr( - hosted_file_operations, - "upload_raw_data", - Mock(wraps=hacked_upload_raw_data), - ) - - run[key].upload(filename) - - run[key].download(downloaded_filename) - - assert os.path.getsize(downloaded_filename) == file_size - with open(downloaded_filename, "rb") as file: - content = file.read() - assert len(content) == file_size - assert content == b"\0" * file_size - # handling restart + 3 for actual upload - assert hacked_upload_raw_data.upload_part_iteration == 5 - - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_replace_float_attribute_with_uploaded_file(self, container: NeptuneObject): - key = self.gen_key() - file_size = 100 * SIZE_1KB # 100 kB - filename = fake.file_name() - downloaded_filename = fake.file_name() - - with tmp_context(): - # create file_size file - with open(filename, "wb") as file: - file.write(b"\0" * file_size) - - # set key to a float and sync it separately - container[key] = 42.0 - container.sync() - - # delete and upload in the same queue flush - container[key].pop() - container[key].upload(filename) - - container.sync() - container[key].download(downloaded_filename) - - assert os.path.getsize(downloaded_filename) == file_size - with open(downloaded_filename, "rb") as file: - content = file.read() - assert len(content) == file_size - assert content == b"\0" * file_size - - def test_upload_with_changed_working_directory(self, environment): - os.makedirs("some_other_folder", exist_ok=True) - - key_in_mem = self.gen_key() - key_from_disk = self.gen_key() - - with preserve_cwd("some_other_folder"): - run = initialize_container(container_type="run", project=environment.project) - # upload file from memory - 
run[key_in_mem].upload(File.from_content("abcd")) - - # upload file from disk - filename = fake.file_name() - with open(filename, "w") as fp: - fp.write("test content") - - run[key_from_disk].upload(filename) - - run.sync() - - assert run.exists(key_in_mem) - assert run.exists(key_from_disk) - run.stop() - - -@pytest.mark.xfail(reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) -class TestFileSet(BaseE2ETest): - def _test_fileset(self, container: NeptuneObject, large_file_size: int, small_files_no: int): - key = self.gen_key() - large_filename = fake.file_name() - small_files = [ - (f"{uuid.uuid4()}.{fake.file_extension()}", fake.sentence().encode("utf-8")) for _ in range(small_files_no) - ] - - with tmp_context(): - # create single large file (multipart) and a lot of very small files - with open(large_filename, "wb") as file: - file.write(b"\0" * large_file_size) - for filename, contents in small_files: - with open(filename, "wb") as file: - file.write(contents) - - small_filenames = [filename for filename, _ in small_files] - # make sure there are no duplicates - assert len({large_filename, *small_filenames}) == len(small_files) + 1 - - # when one file as fileset uploaded - container[key].upload_files([large_filename]) - - # then check if will be downloaded - container.sync() - container[key].download("downloaded1.zip") - - with ZipFile("downloaded1.zip") as zipped: - assert set(zipped.namelist()) == {large_filename, "/"} - with zipped.open(large_filename, "r") as file: - content = file.read() - assert len(content) == large_file_size - assert content == b"\0" * large_file_size - - # when small files as fileset uploaded - container[key].upload_files(small_filenames) - - # then check if everything will be downloaded - container.sync() - container[key].download("downloaded2.zip") - - with ZipFile("downloaded2.zip") as zipped: - assert set(zipped.namelist()) == {large_filename, "/", *small_filenames} - with zipped.open(large_filename, "r") as file: - content = file.read() - assert len(content) == large_file_size - assert content == b"\0" * large_file_size - for filename, expected_content in small_files: - with zipped.open(filename, "r") as file: - content = file.read() - assert len(content) == len(expected_content) - assert content == expected_content - - # when first file is removed - container[key].delete_files([large_filename]) - - # then check if the rest will be downloaded - container.sync() - container[key].download("downloaded3.zip") - - with ZipFile("downloaded3.zip") as zipped: - assert set(zipped.namelist()) == {"/", *small_filenames} - for filename, expected_content in small_files: - with zipped.open(filename, "r") as file: - content = file.read() - assert len(content) == len(expected_content) - assert content == expected_content - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_fileset(self, container: NeptuneObject): - # 100 kB, single upload for large file - large_file_size = 100 * SIZE_1KB - small_files_no = 10 - self._test_fileset(container, large_file_size, small_files_no) - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_fileset_with_multipart(self, container: NeptuneObject): - # 10 MB, multipart upload for large file - large_file_size = 10 * SIZE_1MB - small_files_no = 100 - self._test_fileset(container, large_file_size, small_files_no) - - @classmethod - def _gen_tree_paths(cls, depth, width=2) -> Set: - """Generates all subdirectories of some random tree 
directory structure""" - this_level_dirs = (fake.word() + "/" for _ in range(width)) - if depth == 1: - return set(this_level_dirs) - else: - subpaths = cls._gen_tree_paths(depth=depth - 1, width=width) - new_paths = set("".join(prod) for prod in product(subpaths, this_level_dirs)) - subpaths.update(new_paths) - return subpaths - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_fileset_nested_structure(self, container: NeptuneObject): - key = self.gen_key() - possible_paths = self._gen_tree_paths(depth=3) - - small_files = [ - ( - f"{path}{uuid.uuid4()}.{fake.file_extension()}", - os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)), - ) - for path in possible_paths - ] - - with tmp_context(): - # create dirs - for dir_path in possible_paths: - os.makedirs(dir_path, exist_ok=True) - # create a lot of very small files in different directories - for filename, contents in small_files: - with open(filename, "wb") as file: - file.write(contents) - - small_filenames = [filename for filename, _ in small_files] - # make sure there are no duplicates - assert len({*small_filenames}) == len(small_files) - - # when small files as fileset uploaded - container[key].upload_files(".") - - # then check if everything will be downloaded - container.sync() - container[key].download("downloaded.zip") - - with ZipFile("downloaded.zip") as zipped: - assert set(zipped.namelist()) == { - "/", - *possible_paths, - *small_filenames, - } - for filename, expected_content in small_files: - with zipped.open(filename, "r") as file: - content = file.read() - assert len(content) == len(expected_content) - assert content == expected_content - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_reset_fileset(self, container: NeptuneObject): - key = self.gen_key() - filename1 = fake.file_name() - filename2 = fake.file_name() - content1 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - content2 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - - with tmp_context(): - # create file1 and file2 - with open(filename1, "wb") as file1, open(filename2, "wb") as file2: - file1.write(content1) - file2.write(content2) - - # upload file1 to initial fileset - container[key].upload_files(filename1) - - # then replace [file1] set with [file2] to the same key - container.sync() - container[key] = FileSet([filename2]) - - # check if there's content of SECOND uploaded file - container.sync() - container[key].download("downloaded.zip") - with ZipFile("downloaded.zip") as zipped: - assert set(zipped.namelist()) == {filename2, "/"} - with zipped.open(filename2, "r") as file: - content = file.read() - assert len(content) == len(content2) - assert content == content2 - - @pytest.mark.parametrize("container", ["run"], indirect=True) - @pytest.mark.parametrize( - "delete_attribute", - [ - pytest.param( - True, - marks=pytest.mark.xfail( - reason="Field deletion disabled", - raises=NeptuneUnsupportedFunctionalityException, - strict=True, - ), - ), - False, - ], - ) - def test_single_file_override(self, container: NeptuneObject, delete_attribute: bool): - key = self.gen_key() - filename1 = fake.file_name() - filename2 = fake.file_name() - content1 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - content2 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - downloaded_filename = fake.file_name() - - with tmp_context(): - # create file1 and file2 - with open(filename1, "wb") as file1, open(filename2, "wb") as file2: - file1.write(content1) - file2.write(content2) - - # upload 
file1 to key - container[key].upload(filename1) - - if delete_attribute: - # delete attribute - del container[key] - # make sure that attribute does not exist - container.sync() - with pytest.raises(AttributeError): - container[key].download(downloaded_filename) - - # then upload file2 to the same key - container[key].upload(filename2) - - # check if there's content of SECOND uploaded file - container.sync() - container[key].download(downloaded_filename) - with open(downloaded_filename, "rb") as file: - content = file.read() - assert len(content) == len(content2) - assert content == content2 - - @pytest.mark.parametrize("container", ["run"], indirect=True) - @pytest.mark.parametrize( - "delete_attribute", - [ - pytest.param( - True, - marks=pytest.mark.xfail( - reason="Field deletion disabled", - raises=NeptuneUnsupportedFunctionalityException, - strict=True, - ), - ), - False, - ], - ) - def test_fileset_file_override(self, container: NeptuneObject, delete_attribute: bool): - key = self.gen_key() - filename = fake.file_name() - content1 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - content2 = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - - with tmp_context(): - # create file - with open(filename, "wb") as file1: - file1.write(content1) - # upload file1 to key - container[key].upload_files([filename]) - - if delete_attribute: - # delete attribute - del container[key] - # make sure that attribute does not exist - container.sync() - with pytest.raises(AttributeError): - container[key].download("failed_download.zip") - - # override file content - with open(filename, "wb") as file: - file.write(content2) - # then upload file2 to the same key - container[key].upload_files([filename]) - - # check if there's content of ONLY SECOND uploaded file - container.sync() - container[key].download("downloaded.zip") - - with ZipFile("downloaded.zip") as zipped: - assert set(zipped.namelist()) == {filename, "/"} - with zipped.open(filename, "r") as file: - content = file.read() - assert len(content) == len(content2) - assert content == content2 - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_list_fileset_files(self, container: NeptuneObject): - key = self.gen_key() - filename = fake.file_name() - content = os.urandom(random.randint(SIZE_1KB, 100 * SIZE_1KB)) - - with tmp_context(): - # create file - with open(filename, "wb") as file1: - file1.write(content) - - file_size = os.path.getsize(filename) - # upload file1 to key - container[key].upload_files([filename]) - container.sync() - - file_list = container[key].list_fileset_files() - assert len(file_list) == 1 - assert file_list[0].name == filename - assert file_list[0].file_type == "file" - assert file_list[0].size == file_size - - container[key].delete_files(filename) - container.sync() - assert container[key].list_fileset_files() == [] - - -@pytest.mark.xfail(reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) -class TestPlotObjectsAssignment(BaseE2ETest): - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_pil_image(self, container: NeptuneObject): - pil_image = generate_pil_image() - container["pil_image"] = pil_image - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_matplotlib_figure(self, container: NeptuneObject): - figure = generate_matplotlib_figure() - container["matplotlib_figure"] = figure - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_altair_chart(self, container: 
NeptuneObject): - altair_chart = generate_altair_chart() - container["altair_chart"] = altair_chart - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_brokeh_figure(self, container: NeptuneObject): - brokeh_figure = generate_brokeh_figure() - container["brokeh_figure"] = brokeh_figure - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_plotly_figure(self, container: NeptuneObject): - plotly_figure = generate_plotly_figure() - container["plotly_figure"] = plotly_figure - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_seaborn_figure(self, container: NeptuneObject): - seaborn_figure = generate_seaborn_figure() - container["seaborn_figure"] = seaborn_figure diff --git a/tests/e2e/standard/test_init.py b/tests/e2e/standard/test_init.py deleted file mode 100644 index 70c012c2c..000000000 --- a/tests/e2e/standard/test_init.py +++ /dev/null @@ -1,230 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import pytest - -import neptune -from neptune.exceptions import NeptuneModelKeyAlreadyExistsError -from neptune.objects import ( - Model, - Project, -) -from neptune.types import GitRef -from tests.e2e.base import ( - AVAILABLE_CONTAINERS, - BaseE2ETest, - fake, -) -from tests.e2e.utils import ( - initialize_container, - reinitialize_container, - with_check_if_file_appears, -) - - -class TestInitRun(BaseE2ETest): - def test_custom_run_id(self, environment): - custom_run_id = "-".join((fake.word() for _ in range(3))) - with neptune.init_run(custom_run_id=custom_run_id, project=environment.project) as run: - key = self.gen_key() - val = fake.word() - run[key] = val - run.sync() - - with neptune.init_run(custom_run_id=custom_run_id, project=environment.project) as exp2: - assert exp2[key].fetch() == val - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_send_source_code(self, environment): - with neptune.init_run( - source_files="**/*.py", - name="E2e init source code", - project=environment.project, - ) as exp: - # download sources - exp.sync() - with with_check_if_file_appears("files.zip"): - exp["source_code/files"].download() - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_git_client_repository(self, environment): - with neptune.init_run( - git_ref=GitRef(repository_path="."), - project=environment.project, - ) as exp: - # download sources - exp.sync() - assert exp.exists("source_code/git") - - with neptune.init_run( - git_ref=True, - project=environment.project, - ) as exp: - # download sources - exp.sync() - assert exp.exists("source_code/git") - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_git_disabled(self, environment, recwarn): - with neptune.init_run( - git_ref=GitRef.DISABLED, - project=environment.project, - ) as exp: - # download sources - exp.sync() - assert not exp.exists("source_code/git") - - with neptune.init_run( - git_ref=False, - 
project=environment.project, - ) as exp: - # download sources - exp.sync() - assert not exp.exists("source_code/git") - - assert len(recwarn) == 0 # upload was not skipped due to an exception that would raise a warning - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_infer_dependencies(self, environment): - with neptune.init_run( - project=environment.project, - dependencies="infer", - ) as exp: - exp.sync() - - assert exp.exists("source_code/requirements") - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_upload_dependency_file(self, environment): - filename = fake.file_name(extension="txt") - with open(filename, "w") as file: - file.write("some-dependency==1.0.0") - - with neptune.init_run( - project=environment.project, - dependencies=filename, - ) as exp: - exp.sync() - - exp["source_code/requirements"].download("requirements.txt") - - with open("requirements.txt", "r") as file: - assert file.read() == "some-dependency==1.0.0" - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_warning_raised_if_dependency_file_non_existent(self, capsys, environment): - with neptune.init_run(dependencies="some_non_existent_file", project=environment.project): - ... - - captured = capsys.readouterr() - assert "'some_non_existent_file' does not exist" in captured.out - assert "ERROR" in captured.out - - @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") - def test_tracking_uncommitted_changes(self, repo, environment): - file = repo.working_dir + "/some_file.txt" - with open(file, "w") as fp: - fp.write("some-content\n") - - repo.git.add(file) - - with neptune.init_run(project=environment.project, git_ref=GitRef(repository_path=repo.working_dir)) as run: - run.sync() - assert run.exists("source_code/diff") - run["source_code/diff"].download() - with open("diff.patch") as fp: - assert "some-content" in fp.read() - - -@pytest.mark.skip(reason="Project is not supported") -class TestInitProject(BaseE2ETest): - def test_resuming_project(self, environment): - exp = neptune.init_project(project=environment.project) - - key = self.gen_key() - val = fake.word() - exp[key] = val - exp.sync() - - exp.stop() - - exp2 = neptune.init_project(project=environment.project) - assert exp2[key].fetch() == val - - def test_init_and_readonly(self, environment): - project: Project = neptune.init_project(project=environment.project) - - key = f"{self.gen_key()}-" + "-".join((fake.word() for _ in range(4))) - val = fake.word() - project[key] = val - project.sync() - project.stop() - - read_only_project = neptune.init_project(mode="read-only", project=environment.project) - read_only_project.sync() - - assert set(read_only_project.get_structure()["sys"]) == { - "creation_time", - "id", - "modification_time", - "monitoring_time", - "name", - "ping_time", - "running_time", - "size", - "state", - "tags", - "visibility", - } - assert read_only_project[key].fetch() == val - - -class TestInitModel(BaseE2ETest): - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model", - marks=pytest.mark.skip(reason="model is not supported"), - ) - ], - indirect=True, - ) - def test_fail_reused_model_key(self, container: Model, environment): - with pytest.raises(NeptuneModelKeyAlreadyExistsError): - model_key = container["sys/id"].fetch().split("-")[1] - neptune.init_model(key=model_key, project=environment.project) - - -class TestReinitialization(BaseE2ETest): - 
@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) - def test_resuming_container(self, container_type, environment): - container = initialize_container(container_type=container_type, project=environment.project) - sys_id = container["sys/id"].fetch() - - key = self.gen_key() - val = fake.word() - container[key] = val - container.sync() - container.stop() - - reinitialized = reinitialize_container( - sys_id=sys_id, - container_type=container.container_type.value, - project=environment.project, - ) - assert reinitialized[key].fetch() == val - - reinitialized.stop() diff --git a/tests/e2e/standard/test_multiple.py b/tests/e2e/standard/test_multiple.py deleted file mode 100644 index 700430199..000000000 --- a/tests/e2e/standard/test_multiple.py +++ /dev/null @@ -1,129 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import concurrent.futures -import random - -import pytest - -import neptune -from tests.e2e.base import ( - BaseE2ETest, - fake, -) -from tests.e2e.utils import reinitialize_container - - -def store_in_container(sys_id: str, project: str, container_type: str, destination: str): - container = reinitialize_container( - sys_id=sys_id, - container_type=container_type, - project=project, - capture_stdout=False, - capture_stderr=False, - capture_hardware_metrics=False, - capture_traceback=False, - ) - container[destination] = fake.color() - container.sync() - - -class TestMultiple(BaseE2ETest): - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_single_thread(self, container: neptune.Run, environment): - sys_id = container["sys/id"].fetch() - number_of_reinitialized = 5 - namespace = self.gen_key() - - reinitialized = [ - reinitialize_container( - sys_id=sys_id, - container_type=container.container_type.value, - project=environment.project, - capture_stdout=False, - capture_stderr=False, - capture_hardware_metrics=False, - capture_traceback=False, - ) - for _ in range(number_of_reinitialized) - ] - - container[f"{namespace}/{fake.unique.word()}"] = fake.color() - container.sync() - - random.shuffle(reinitialized) - for reinitialized_container in reinitialized: - reinitialized_container[f"{namespace}/{fake.unique.word()}"] = fake.color() - - random.shuffle(reinitialized) - for reinitialized_container in reinitialized: - reinitialized_container.sync() - - container.sync() - - assert len(container[namespace].fetch()) == number_of_reinitialized + 1 - - for r in reinitialized: - r.stop() - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_multiple_processes(self, container: neptune.Run, environment): - number_of_reinitialized = 10 - namespace = self.gen_key() - - container[f"{namespace}/{fake.unique.word()}"] = fake.color() - - with concurrent.futures.ProcessPoolExecutor(max_workers=3) as executor: - futures = [ - executor.submit( - store_in_container, - sys_id=container["sys/id"].fetch(), - container_type=container.container_type.value, - project=environment.project, - 
destination=f"{namespace}/{fake.unique.word()}", - ) - for _ in range(number_of_reinitialized) - ] - for future in concurrent.futures.as_completed(futures): - _ = future.result() - - container.sync() - - assert len(container[namespace].fetch()) == number_of_reinitialized + 1 - - @pytest.mark.parametrize("container", ["run"], indirect=True) - def test_multiple_threads(self, container: neptune.Run, environment): - number_of_reinitialized = 10 - namespace = self.gen_key() - - container[f"{namespace}/{fake.unique.word()}"] = fake.color() - - with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor: - futures = [ - executor.submit( - store_in_container, - sys_id=container["sys/id"].fetch(), - container_type=container.container_type.value, - project=environment.project, - destination=f"{namespace}/{fake.unique.word()}", - ) - for _ in range(number_of_reinitialized) - ] - for future in concurrent.futures.as_completed(futures): - _ = future.result() - - container.sync() - - assert len(container[namespace].fetch()) == number_of_reinitialized + 1 diff --git a/tests/e2e/standard/test_multiprocessing.py b/tests/e2e/standard/test_multiprocessing.py deleted file mode 100644 index 07f4c6171..000000000 --- a/tests/e2e/standard/test_multiprocessing.py +++ /dev/null @@ -1,41 +0,0 @@ -import os -import signal -import unittest -from multiprocessing import Barrier - -import pytest - -from neptune.internal.utils.utils import IS_WINDOWS -from tests.e2e.base import AVAILABLE_CONTAINERS -from tests.e2e.utils import ( - Environment, - initialize_container, -) - - -@unittest.skipIf(IS_WINDOWS, "Windows does not support fork") -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_fork_child_parent_info_exchange(container_type: str, environment: Environment): - barrier = Barrier(2) - with initialize_container(container_type=container_type, project=environment.project) as container: - child_pid = os.fork() - if child_pid == 0: - # child process exec - container["child_key"] = "child_value" - container.wait() - barrier.wait() # after barrier both processes have sent data - - container.sync() - assert container["parent_key"].fetch() == "parent_value" - - os.kill(os.getpid(), signal.SIGTERM) # kill child process, as it has cloned testing runtime - else: - # parent process exec - container["parent_key"] = "parent_value" - container.wait() - barrier.wait() # after barrier both processes have sent data - - container.sync() - assert container["child_key"].fetch() == "child_value" - - os.waitpid(child_pid, 0) diff --git a/tests/e2e/standard/test_series.py b/tests/e2e/standard/test_series.py deleted file mode 100644 index b78c6ea68..000000000 --- a/tests/e2e/standard/test_series.py +++ /dev/null @@ -1,164 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import random -import time -from contextlib import contextmanager - -import pytest - -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.objects import NeptuneObject -from neptune.types import ( - FileSeries, - FloatSeries, - StringSeries, -) -from tests.e2e.base import ( - AVAILABLE_CONTAINERS, - BaseE2ETest, - fake, - make_parameters, -) -from tests.e2e.utils import ( - generate_image, - image_to_png, - tmp_context, -) - -Image = pytest.importorskip("PIL.Image") - -BASIC_SERIES_TYPES = ( - make_parameters(["strings", "floats", "files"]) - .xfail("files", reason="File functionality disabled", raises=NeptuneUnsupportedFunctionalityException) - .eval() -) - - -@pytest.mark.xfail( - reason="fetch_last and download_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException -) -class TestSeries(BaseE2ETest): - @pytest.mark.parametrize("series_type", BASIC_SERIES_TYPES) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_log(self, container: NeptuneObject, series_type: str): - with self.run_then_assert(container, series_type) as ( - namespace, - values, - steps, - timestamps, - ): - for value, step, timestamp in zip(values, steps, timestamps): - namespace.log(value, step=step, timestamp=timestamp) - - @pytest.mark.parametrize("series_type", BASIC_SERIES_TYPES) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_append(self, container: NeptuneObject, series_type: str): - with self.run_then_assert(container, series_type) as (namespace, values, steps, timestamps): - for value, step, timestamp in zip(values, steps, timestamps): - namespace.append(value, step=step, timestamp=timestamp) - - @pytest.mark.parametrize("series_type", BASIC_SERIES_TYPES) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_extend(self, container: NeptuneObject, series_type: str): - with self.run_then_assert(container, series_type) as (namespace, values, steps, timestamps): - namespace.extend([values[0]], steps=[steps[0]], timestamps=[timestamps[0]]) - namespace.extend(values[1:], steps=steps[1:], timestamps=timestamps[1:]) - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_float_series_type_assign(self, container: NeptuneObject): - with self.run_then_assert(container, "floats") as (namespace, values, steps, timestamps): - namespace.assign(FloatSeries(values=values, steps=steps, timestamps=timestamps)) - - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_string_series_type_assign(self, container: NeptuneObject): - with self.run_then_assert(container, "strings") as (namespace, values, steps, timestamps): - namespace.assign(StringSeries(values=values, steps=steps, timestamps=timestamps)) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - @pytest.mark.parametrize("container", AVAILABLE_CONTAINERS, indirect=True) - def test_file_series_type_assign(self, container: NeptuneObject): - with self.run_then_assert(container, "files") as (namespace, values, steps, timestamps): - namespace.assign(FileSeries(values=values, steps=steps, timestamps=timestamps)) - - @contextmanager - def run_then_assert(self, container: NeptuneObject, series_type: str): - steps = sorted(random.sample(range(1, 100), 5)) - timestamps = [ - 1675876469.0, - 1675876470.0, - 1675876471.0, - 1675876472.0, - 1675876473.0, - ] - key = self.gen_key() -
- if series_type == "floats": - # given - values = list(random.random() for _ in range(5)) - - # when - yield container[key], values, steps, timestamps - container.sync() - - # then - assert container[key].fetch_last() == values[-1] - assert list(container[key].fetch_values()["value"]) == values - assert list(container[key].fetch_values()["step"]) == steps - assert ( - list(map(lambda t: time.mktime(t.utctimetuple()), container[key].fetch_values()["timestamp"])) - == timestamps - ) - - elif series_type == "strings": - # given - values = list(fake.word() for _ in range(5)) - - # when - yield container[key], values, steps, timestamps - - container.sync() - - # then - assert container[key].fetch_last() == values[-1] - assert list(container[key].fetch_values()["value"]) == values - assert list(container[key].fetch_values()["step"]) == steps - assert ( - list(map(lambda t: time.mktime(t.utctimetuple()), container[key].fetch_values()["timestamp"])) - == timestamps - ) - - elif series_type == "files": - # given - images = list(generate_image(size=2**n) for n in range(7, 12)) - - # when - yield container[key], images, steps, timestamps - - container.sync() - - # then - with tmp_context(): - container[key].download_last("last") - container[key].download("all") - - with Image.open("last/4.png") as img: - assert img == image_to_png(image=images[-1]) - - for i in range(5): - with Image.open(f"all/{i}.png") as img: - assert img == image_to_png(image=images[i]) diff --git a/tests/e2e/standard/test_stage_transitions.py b/tests/e2e/standard/test_stage_transitions.py deleted file mode 100644 index 132df0e3f..000000000 --- a/tests/e2e/standard/test_stage_transitions.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pytest - -from neptune.exceptions import NeptuneCannotChangeStageManually -from neptune.objects import ModelVersion -from tests.e2e.base import BaseE2ETest - - -class TestStageTransitions(BaseE2ETest): - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model_version", - marks=pytest.mark.skip(reason="Model version not implemented"), - ) - ], - indirect=True, - ) - def test_transitions(self, container: ModelVersion): - container["a"] = 14 - - assert container["sys/stage"].fetch() == "none" - - container.change_stage("staging") - container.sync() - - assert container["sys/stage"].fetch() == "staging" - - container.change_stage("production") - container.sync() - - assert container["sys/stage"].fetch() == "production" - - container.change_stage("none") - container.sync() - - assert container["sys/stage"].fetch() == "none" - - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model_version", - marks=pytest.mark.skip(reason="Model version not implemented"), - ) - ], - indirect=True, - ) - def test_fail_on_unknown_stage_value(self, container: ModelVersion): - with pytest.raises(ValueError): - container.change_stage("unknown") - container.sync() - - @pytest.mark.parametrize( - "container", - [ - pytest.param( - "model_version", - marks=pytest.mark.skip(reason="Model version not implemented"), - ) - ], - indirect=True, - ) - def test_fail_on_manual(self, container: ModelVersion): - with pytest.raises(NeptuneCannotChangeStageManually): - container["sys/stage"] = "staging" - container.sync() diff --git a/tests/e2e/utils.py b/tests/e2e/utils.py deleted file mode 100644 index c06af5c83..000000000 --- a/tests/e2e/utils.py +++ /dev/null @@ -1,235 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__all__ = [ - "with_check_if_file_appears", - "tmp_context", - "a_project_name", - "a_key", - "Environment", - "initialize_container", - "reinitialize_container", - "modified_environ", - "catch_time", - "SIZE_1KB", - "SIZE_1MB", -] - -import io -import os -import random -import string -import tempfile -from contextlib import contextmanager -from datetime import datetime -from math import sqrt -from time import perf_counter - -import numpy -from attr import dataclass - -import neptune -from neptune.internal.container_type import ContainerType -from tests.e2e.exceptions import MissingEnvironmentVariable - - -def _remove_file_if_exists(filepath): - try: - os.remove(filepath) - except OSError: - pass - - -SIZE_1MB = 2**20 -SIZE_1KB = 2**10 - -# init kwargs which significantly reduce operations noise -DISABLE_SYSLOG_KWARGS = { - "capture_stdout": False, - "capture_stderr": False, - "capture_hardware_metrics": False, -} - - -@contextmanager -def with_check_if_file_appears(filepath): - """Checks if file will be present when leaving the block. 
- File is removed if it exists when entering the block.""" - _remove_file_if_exists(filepath) - - try: - yield - finally: - assert os.path.exists(filepath) - _remove_file_if_exists(filepath) - - -@contextmanager -def preserve_cwd(path): - cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(cwd) - - -@contextmanager -def tmp_context(): - with tempfile.TemporaryDirectory() as tmp: - with preserve_cwd(tmp): - yield tmp - - -def generate_image(*, size: int) -> "Image": # noqa: F821 - """Generate an image of approximately `size` bytes.""" - from PIL import Image - - width = int(sqrt(size / 3)) # 3 bytes per pixel in a square RGB image - random_numbers = numpy.random.rand(width, width, 3) * 255 - return Image.fromarray(random_numbers.astype("uint8")).convert("RGB") - - -def image_to_png(*, image: "Image") -> "PngImageFile": # noqa: F821 - from PIL.PngImagePlugin import PngImageFile - - png_buf = io.BytesIO() - image.save(png_buf, format="png") - png_buf.seek(0) - return PngImageFile(png_buf) - - -def a_key(): - return "".join(random.choices(string.ascii_uppercase, k=10)) - - -def a_project_name(project_slug: str): - project_name = f"e2e-{datetime.now().strftime('%Y%m%d-%H%M')}-{project_slug}" - - return project_name - - -class RawEnvironment: - """Load environment variables required to run e2e tests""" - - def __init__(self): - env = os.environ - try: - # Target workspace name - self.workspace_name = env["WORKSPACE_NAME"] - # Admin user - self.admin_username = env["ADMIN_USERNAME"] - # Admin user API token - self.admin_neptune_api_token = env["ADMIN_NEPTUNE_API_TOKEN"] - # Member user - self.user_username = env["USER_USERNAME"] - # SA name - self.service_account_name = env["SERVICE_ACCOUNT_NAME"] - # Member user or SA API token - self.neptune_api_token = env["NEPTUNE_API_TOKEN"] - except KeyError as e: - raise MissingEnvironmentVariable(missing_variable=e.args[0]) from e - - -@dataclass -class Environment: - workspace: str - project: str - user_token: str # token of `user` or `service_account` - admin_token: str - admin: str - user: str - service_account: str - - -def initialize_container(container_type, project, **extra_args): - if isinstance(container_type, ContainerType): - container_type = container_type.value - - if container_type == "project": - return neptune.init_project(project=project, **extra_args) - - if container_type == "run": - return neptune.init_run(project=project, **extra_args) - - if container_type == "model": - return neptune.init_model(key=a_key(), project=project, **extra_args) - - if container_type == "model_version": - model = neptune.init_model(key=a_key(), project=project, **extra_args) - model_sys_id = model["sys/id"].fetch() - model.stop() - - return neptune.init_model_version(model=model_sys_id, project=project, **extra_args) - - raise NotImplementedError(container_type) - - -def reinitialize_container(sys_id: str, container_type: str, project: str, **kwargs): - if container_type == "project": - # exactly the same as initialize_container(project), for convenience - return neptune.init_project(project=project, **kwargs) - - if container_type == "run": - return neptune.init_run(with_id=sys_id, project=project, **kwargs) - - if container_type == "model": - return neptune.init_model(with_id=sys_id, project=project, **kwargs) - - if container_type == "model_version": - return neptune.init_model_version(with_id=sys_id, project=project, **kwargs) - - raise NotImplementedError(container_type) - - -# from https://stackoverflow.com/a/62956469 -@contextmanager -def catch_time() -> float: - start
= perf_counter() - yield lambda: perf_counter() - start - - -# from https://stackoverflow.com/a/34333710 -@contextmanager -def modified_environ(*remove, **update): - """ - Temporarily updates the ``os.environ`` dictionary in-place. - - The ``os.environ`` dictionary is updated in-place so that the modification - is sure to work in all situations. - - :param remove: Environment variables to remove. - :param update: Dictionary of environment variables and values to add/update. - """ - env = os.environ - update = update or {} - remove = remove or [] - - # List of environment variables being updated or removed. - stomped = (set(update.keys()) | set(remove)) & set(env.keys()) - # Environment variables and values to restore on exit. - update_after = {k: env[k] for k in stomped} - # Environment variables and values to remove on exit. - remove_after = frozenset(k for k in update if k not in env) - - try: - env.update(update) - for k in remove: - env.pop(k, None) - yield - finally: - env.update(update_after) - for k in remove_after: - env.pop(k) diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index 62a86a5be..000000000 --- a/tests/unit/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/data/local_artifact_drivers_data/dir_to_link/file_in_linked_dir.txt b/tests/unit/data/local_artifact_drivers_data/dir_to_link/file_in_linked_dir.txt deleted file mode 100755 index 76aa37485..000000000 --- a/tests/unit/data/local_artifact_drivers_data/dir_to_link/file_in_linked_dir.txt +++ /dev/null @@ -1 +0,0 @@ -My parent dir will be symlinked somewhere. \ No newline at end of file diff --git a/tests/unit/data/local_artifact_drivers_data/file_to_link.txt b/tests/unit/data/local_artifact_drivers_data/file_to_link.txt deleted file mode 100755 index 6138d797c..000000000 --- a/tests/unit/data/local_artifact_drivers_data/file_to_link.txt +++ /dev/null @@ -1 +0,0 @@ -I'm file which'll be symlinked in some path. \ No newline at end of file diff --git a/tests/unit/data/local_artifact_drivers_data/files_to_track/file1.txt b/tests/unit/data/local_artifact_drivers_data/files_to_track/file1.txt deleted file mode 100755 index a1c457de3..000000000 --- a/tests/unit/data/local_artifact_drivers_data/files_to_track/file1.txt +++ /dev/null @@ -1 +0,0 @@ -File in base catalog. \ No newline at end of file diff --git a/tests/unit/data/local_artifact_drivers_data/files_to_track/sub_dir/file_in_subdir.txt b/tests/unit/data/local_artifact_drivers_data/files_to_track/sub_dir/file_in_subdir.txt deleted file mode 100755 index 6eb759f6b..000000000 --- a/tests/unit/data/local_artifact_drivers_data/files_to_track/sub_dir/file_in_subdir.txt +++ /dev/null @@ -1 +0,0 @@ -I'm a little bit nested. 
\ No newline at end of file diff --git a/tests/unit/neptune/__init__.py b/tests/unit/neptune/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/unit/neptune/backend_test_mixin.py b/tests/unit/neptune/backend_test_mixin.py deleted file mode 100644 index 0a8c924dd..000000000 --- a/tests/unit/neptune/backend_test_mixin.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from mock import MagicMock - - -class BackendTestMixin: - @staticmethod - def _get_swagger_client_mock( - swagger_client_factory, - min_recommended=None, - min_compatible=None, - max_compatible=None, - ): - py_lib_versions = type("py_lib_versions", (object,), {})() - setattr(py_lib_versions, "minRecommendedVersion", min_recommended) - setattr(py_lib_versions, "minCompatibleVersion", min_compatible) - setattr(py_lib_versions, "maxCompatibleVersion", max_compatible) - - artifacts = type("artifacts", (object,), {})() - setattr(artifacts, "enabled", True) - setattr(artifacts, "apiVersion", 2) - - multipart_upload = type("multiPartUpload", (object,), {})() - setattr(multipart_upload, "enabled", True) - setattr(multipart_upload, "minChunkSize", 204800) # 200KB - setattr(multipart_upload, "maxChunkSize", 1073741824) - setattr(multipart_upload, "maxChunkCount", 1000) - setattr(multipart_upload, "maxSinglePartSize", 204800) # 200KB - - client_config = type("client_config_response_result", (object,), {})() - setattr(client_config, "pyLibVersions", py_lib_versions) - setattr(client_config, "artifacts", artifacts) - setattr(client_config, "multiPartUpload", multipart_upload) - setattr(client_config, "apiUrl", "ui.neptune.ai") - setattr(client_config, "applicationUrl", "ui.neptune.ai") - - swagger_client = MagicMock() - swagger_client.api.getClientConfig.return_value.response.return_value.result = client_config - swagger_client_factory.return_value = swagger_client - - return swagger_client diff --git a/tests/unit/neptune/management/__init__.py b/tests/unit/neptune/management/__init__.py deleted file mode 100644 index d71b3273e..000000000 --- a/tests/unit/neptune/management/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
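# A condensed sketch of the stubbing pattern BackendTestMixin above is built
# on: type() with an empty dict creates a throwaway attribute bag, and
# MagicMock fakes a whole bravado call chain in one assignment. unittest.mock
# stands in here for the external `mock` package used by the original tests.
from unittest.mock import MagicMock

client_config = type("client_config", (object,), {})()  # anonymous attribute bag
setattr(client_config, "apiUrl", "ui.neptune.ai")

swagger_client = MagicMock()
# one assignment fakes swagger_client.api.getClientConfig().response().result
swagger_client.api.getClientConfig.return_value.response.return_value.result = client_config

assert swagger_client.api.getClientConfig().response().result.apiUrl == "ui.neptune.ai"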
-# diff --git a/tests/unit/neptune/management/internal/__init__.py b/tests/unit/neptune/management/internal/__init__.py deleted file mode 100644 index d71b3273e..000000000 --- a/tests/unit/neptune/management/internal/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/management/internal/test_api.py b/tests/unit/neptune/management/internal/test_api.py deleted file mode 100644 index 596ce5b8e..000000000 --- a/tests/unit/neptune/management/internal/test_api.py +++ /dev/null @@ -1,131 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import unittest -from unittest.mock import ( - MagicMock, - call, - patch, -) - -import pytest -from bravado.exception import HTTPNotFound -from bravado.response import BravadoResponse - -from neptune import ANONYMOUS_API_TOKEN -from neptune.envs import PROJECT_ENV_NAME -from neptune.internal.backends.hosted_client import DEFAULT_REQUEST_KWARGS -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.internal.envs import API_TOKEN_ENV_NAME -from neptune.management import ( - clear_trash, - delete_objects_from_trash, - trash_objects, -) -from neptune.management.exceptions import ProjectNotFound - - -@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) -class TestTrashObjects(unittest.TestCase): - PROJECT_NAME = "organization/project" - - @classmethod - def setUpClass(cls) -> None: - os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - - @classmethod - def setUp(cls) -> None: - if PROJECT_ENV_NAME in os.environ: - del os.environ[PROJECT_ENV_NAME] - - @patch("neptune.management.internal.api.logger") - @patch("neptune.management.internal.api._get_leaderboard_client") - def test_project_trash_objects(self, _get_leaderboard_client_mock, _mock_logger): - # given - trash_experiments_mock = _get_leaderboard_client_mock().api.trashExperiments - - mock_response = MagicMock(spec=BravadoResponse(MagicMock(), MagicMock())) - trash_experiments_mock.return_value.response.return_value = mock_response - - mock_response.result.errors = ["some_test_error1", "some_test_error2"] - mock_response.result.updatedExperimentIdentifiers = ["RUN-1"] - - # when - trash_objects(self.PROJECT_NAME, ["RUN-1", "MOD", "MOD-1"]) - - # then - self.assertEqual(1, trash_experiments_mock.call_count) - self.assertEqual( - call( - 
projectIdentifier="organization/project", - experimentIdentifiers=[ - "organization/project/RUN-1", - "organization/project/MOD", - "organization/project/MOD-1", - ], - **DEFAULT_REQUEST_KWARGS, - ), - trash_experiments_mock.call_args, - ) - _mock_logger.info.assert_called_once_with("Successfully trashed objects: %d. Number of failures: %d.", 1, 2) - self.assertEqual(_mock_logger.warning.mock_calls, [call("some_test_error1"), call("some_test_error2")]) - - @patch("neptune.management.internal.api._get_leaderboard_client") - def test_trash_objects_invalid_project_name(self, _get_leaderboard_client_mock): - _get_leaderboard_client_mock().api.trashExperiments.side_effect = HTTPNotFound(MagicMock()) - with pytest.raises(ProjectNotFound): - trash_objects(self.PROJECT_NAME, ["RUN-1", "MOD", "MOD-1"]) - - @patch("neptune.management.internal.api._get_leaderboard_client") - def test_project_delete_objects_from_trash(self, _get_leaderboard_client_mock): - # given - delete_experiments_from_trash_mock = _get_leaderboard_client_mock().api.deleteExperiments - - # when - delete_objects_from_trash(self.PROJECT_NAME, ["RUN-1", "MOD", "MOD-1"]) - - # then - assert delete_experiments_from_trash_mock.call_count == 1 - self.assertEqual( - call( - projectIdentifier="organization/project", - experimentIdentifiers=[ - "organization/project/RUN-1", - "organization/project/MOD", - "organization/project/MOD-1", - ], - **DEFAULT_REQUEST_KWARGS, - ), - delete_experiments_from_trash_mock.call_args, - ) - - @patch("neptune.management.internal.api._get_leaderboard_client") - def test_project_clear_trash(self, _get_leaderboard_client_mock): - # given - clear_trash_mock = _get_leaderboard_client_mock().api.deleteAllExperiments - - # when - clear_trash(self.PROJECT_NAME) - - # then - assert clear_trash_mock.call_count == 1 - self.assertEqual( - call( - projectIdentifier="organization/project", - **DEFAULT_REQUEST_KWARGS, - ), - clear_trash_mock.call_args, - ) diff --git a/tests/unit/neptune/management/internal/test_utils.py b/tests/unit/neptune/management/internal/test_utils.py deleted file mode 100644 index dc61ecd64..000000000 --- a/tests/unit/neptune/management/internal/test_utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import unittest - -from neptune.management.exceptions import ( - ConflictingWorkspaceName, - InvalidProjectName, - MissingWorkspaceName, -) -from neptune.management.internal.utils import normalize_project_name - - -class TestManagementUtils(unittest.TestCase): - def test_normalize_project_name(self): - self.assertEqual("jackie/sandbox", normalize_project_name(name="jackie/sandbox")) - self.assertEqual("jackie/sandbox", normalize_project_name(name="sandbox", workspace="jackie")) - self.assertEqual( - "jackie/sandbox", - normalize_project_name(name="jackie/sandbox", workspace="jackie"), - ) - - with self.assertRaises(InvalidProjectName): - normalize_project_name(name="nothing/else/matters") - - with self.assertRaises(MissingWorkspaceName): - normalize_project_name(name="sandbox") - - with self.assertRaises(ConflictingWorkspaceName): - normalize_project_name(name="jackie/sandbox", workspace="john") diff --git a/tests/unit/neptune/new/__init__.py b/tests/unit/neptune/new/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/unit/neptune/new/api/__init__.py b/tests/unit/neptune/new/api/__init__.py deleted file mode 100644 index 8d06af532..000000000 --- a/tests/unit/neptune/new/api/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/api/test_fetching_series_values.py b/tests/unit/neptune/new/api/test_fetching_series_values.py deleted file mode 100644 index a3c03bd85..000000000 --- a/tests/unit/neptune/new/api/test_fetching_series_values.py +++ /dev/null @@ -1,160 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from datetime import datetime - -from mock import ( - Mock, - call, -) - -from neptune.api.fetching_series_values import fetch_series_values -from neptune.api.models import ( - FloatPointValue, - FloatSeriesValues, -) - - -def test__empty(): - # given - getter_mock = Mock() - getter_mock.side_effect = [ - FloatSeriesValues(total=0, values=[]), - ] - - # when - results = fetch_series_values( - getter=getter_mock, - path="some/path", - ) - - # then - assert list(results) == [] - assert getter_mock.call_count == 1 - assert getter_mock.call_args_list == [ - call(from_step=None, limit=1), - ] - - -def test__single_value(): - # given - now = datetime.now() - getter_mock = Mock() - getter_mock.side_effect = [ - FloatSeriesValues(total=1, values=[FloatPointValue(step=1, value=1.0, timestamp=now)]), - ] - - # when - results = fetch_series_values( - getter=getter_mock, - path="some/path", - ) - - # then - assert list(results) == [FloatPointValue(step=1, value=1.0, timestamp=now)] - assert getter_mock.call_count == 1 - assert getter_mock.call_args_list == [ - call(from_step=None, limit=1), - ] - - -def test__multiple_values(): - # given - now = datetime.now() - getter_mock = Mock() - getter_mock.side_effect = [ - FloatSeriesValues( - total=3, - values=[ - FloatPointValue(step=1, value=1.0, timestamp=now), - ], - ), - FloatSeriesValues( - total=3, - values=[ - FloatPointValue(step=1, value=1.0, timestamp=now), - FloatPointValue(step=2, value=2.0, timestamp=now.replace(second=2)), - FloatPointValue(step=3, value=3.0, timestamp=now.replace(second=3)), - ], - ), - ] - - # when - results = fetch_series_values( - getter=getter_mock, - path="some/path", - step_size=2, - ) - - # then - assert list(results) == [ - FloatPointValue(step=1, value=1.0, timestamp=now), - FloatPointValue(step=2, value=2.0, timestamp=now.replace(second=2)), - FloatPointValue(step=3, value=3.0, timestamp=now.replace(second=3)), - ] - assert getter_mock.call_count == 2 - assert getter_mock.call_args_list == [ - call(from_step=None, limit=1), - call(from_step=0.0, limit=2), - ] - - -def test__multiple_pages(): - # given - now = datetime.now() - getter_mock = Mock() - getter_mock.side_effect = [ - FloatSeriesValues( - total=4, - values=[ - FloatPointValue(step=1, value=1.0, timestamp=now), - ], - ), - FloatSeriesValues( - total=4, - values=[ - FloatPointValue(step=1, value=1.0, timestamp=now), - FloatPointValue(step=2, value=2.0, timestamp=now.replace(second=2)), - ], - ), - FloatSeriesValues( - total=4, - values=[ - FloatPointValue(step=3, value=3.0, timestamp=now.replace(second=3)), - FloatPointValue(step=4, value=4.0, timestamp=now.replace(second=4)), - ], - ), - ] - - # when - results = fetch_series_values( - getter=getter_mock, - path="some/path", - step_size=2, - ) - - # then - assert list(results) == [ - FloatPointValue(step=1, value=1.0, timestamp=now), - FloatPointValue(step=2, value=2.0, timestamp=now.replace(second=2)), - FloatPointValue(step=3, value=3.0, timestamp=now.replace(second=3)), - FloatPointValue(step=4, value=4.0, timestamp=now.replace(second=4)), - ] - assert getter_mock.call_count == 3 - assert getter_mock.call_args_list == [ - call(from_step=None, limit=1), - call(from_step=0.0, limit=2), - call(from_step=2.0, limit=2), - ] diff --git a/tests/unit/neptune/new/api/test_models.py b/tests/unit/neptune/new/api/test_models.py deleted file mode 100644 index a6c9539ce..000000000 --- a/tests/unit/neptune/new/api/test_models.py +++ /dev/null @@ -1,2808 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. 
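# A minimal sketch of the pagination contract that the fetch_series_values
# tests above encode; the real implementation may differ. Assumed contract:
# the getter returns an object with `total` and `values`, the first call is a
# limit=1 probe to learn the total, and each page resumes one step before the
# first step seen, then after the last step of the previous page.
from typing import Any, Callable, Iterator


def fetch_series_values_sketch(
    getter: Callable[..., Any], path: str, step_size: int = 10_000
) -> Iterator[Any]:
    # `path` is accepted only for signature parity with the tests above
    probe = getter(from_step=None, limit=1)
    if probe.total <= len(probe.values):
        yield from probe.values  # zero or one point: the probe already has everything
        return
    last_step = probe.values[0].step - 1  # restart just before the first step
    fetched = 0
    while fetched < probe.total:
        page = getter(from_step=last_step, limit=step_size)
        if not page.values:
            break  # defensive stop on an empty page
        yield from page.values
        fetched += len(page.values)
        last_step = page.values[-1].step  # the next page resumes after this step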
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import datetime - -import pytest -from mock import Mock - -from neptune.api.models import ( - ArtifactField, - BoolField, - DateTimeField, - Field, - FieldDefinition, - FieldType, - FileEntry, - FileField, - FileSetField, - FloatField, - FloatSeriesField, - FloatSeriesValues, - GitRefField, - ImageSeriesField, - ImageSeriesValues, - IntField, - LeaderboardEntriesSearchResult, - LeaderboardEntry, - NextPage, - NotebookRefField, - ObjectStateField, - QueryFieldDefinitionsResult, - QueryFieldsExperimentResult, - QueryFieldsResult, - StringField, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.api.proto.neptune_pb.api.model.attributes_pb2 import ( - ProtoNextPageDTO, - ProtoQueryAttributesExperimentResultDTO, - ProtoQueryAttributesResultDTO, -) -from neptune.api.proto.neptune_pb.api.model.leaderboard_entries_pb2 import ( - ProtoAttributeDTO, - ProtoAttributesDTO, - ProtoBoolAttributeDTO, - ProtoDatetimeAttributeDTO, - ProtoFloatAttributeDTO, - ProtoFloatSeriesAttributeDTO, - ProtoIntAttributeDTO, - ProtoLeaderboardEntriesSearchResultDTO, - ProtoStringAttributeDTO, - ProtoStringSetAttributeDTO, -) -from neptune.api.proto.neptune_pb.api.model.series_values_pb2 import ( - ProtoFloatPointValueDTO, - ProtoFloatSeriesValuesDTO, -) - - -def test__float_field__from_dict(): - # given - data = {"attributeType": "float", "attributeName": "some/float", "value": 18.5} - - # when - result = FloatField.from_dict(data) - - # then - assert result.path == "some/float" - assert result.value == 18.5 - - -def test__float_field__from_model(): - # given - model = Mock(attributeType="float", attributeName="some/float", value=18.5) - - # when - result = FloatField.from_model(model) - - # then - assert result.path == "some/float" - assert result.value == 18.5 - - -def test__float_field__from_proto(): - # given - proto = ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ) - - # when - result = FloatField.from_proto(proto) - - # then - assert result.path == "some/float" - assert result.value == 18.5 - - -def test__int_field__from_dict(): - # given - data = {"attributeType": "int", "attributeName": "some/int", "value": 18} - - # when - result = IntField.from_dict(data) - - # then - assert result.path == "some/int" - assert result.value == 18 - - -def test__int_field__from_model(): - # given - model = Mock(attributeType="int", attributeName="some/int", value=18) - - # when - result = IntField.from_model(model) - - # then - assert result.path == "some/int" - assert result.value == 18 - - -def test__int_field__from_proto(): - # given - proto = ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ) - - # when - result = IntField.from_proto(proto) - - # then - assert result.path == "some/int" - assert result.value == 18 - - -def test__string_field__from_dict(): - # given - data = {"attributeType": "string", "attributeName": "some/string", "value": "hello"} - - # when - 
result = StringField.from_dict(data) - - # then - assert result.path == "some/string" - assert result.value == "hello" - - -def test__string_field__from_model(): - # given - model = Mock(attributeType="string", attributeName="some/string", value="hello") - - # when - result = StringField.from_model(model) - - # then - assert result.path == "some/string" - assert result.value == "hello" - - -def test__string_field__from_proto(): - # given - proto = ProtoStringAttributeDTO( - attribute_name="some/string", - attribute_type="string", - value="hello", - ) - - # when - result = StringField.from_proto(proto) - - # then - assert result.path == "some/string" - assert result.value == "hello" - - -def test__string_field__from_dict__empty(): - # given - data = {"attributeType": "string", "attributeName": "some/string", "value": ""} - - # when - result = StringField.from_dict(data) - - # then - assert result.path == "some/string" - assert result.value == "" - - -def test__string_field__from_model__empty(): - # given - model = Mock(attributeType="string", attributeName="some/string", value="") - - # when - result = StringField.from_model(model) - - # then - assert result.path == "some/string" - assert result.value == "" - - -def test__string_field__from_proto__empty(): - # given - proto = ProtoStringAttributeDTO( - attribute_name="some/string", - attribute_type="string", - value="", - ) - - # when - result = StringField.from_proto(proto) - - # then - assert result.path == "some/string" - assert result.value == "" - - -def test__bool_field__from_dict(): - # given - data = {"attributeType": "bool", "attributeName": "some/bool", "value": True} - - # when - result = BoolField.from_dict(data) - - # then - assert result.path == "some/bool" - assert result.value is True - - -def test__bool_field__from_model(): - # given - model = Mock(attributeType="bool", attributeName="some/bool", value=True) - - # when - result = BoolField.from_model(model) - - # then - assert result.path == "some/bool" - assert result.value is True - - -def test__bool_field__from_proto(): - # given - proto = ProtoBoolAttributeDTO( - attribute_name="some/bool", - attribute_type="bool", - value=True, - ) - - # when - result = BoolField.from_proto(proto) - - # then - assert result.path == "some/bool" - assert result.value is True - - -def test__datetime_field__from_dict(): - # given - data = {"attributeType": "datetime", "attributeName": "some/datetime", "value": "2024-01-01T00:12:34.567890Z"} - - # when - result = DateTimeField.from_dict(data) - - # then - assert result.path == "some/datetime" - assert result.value == datetime.datetime(2024, 1, 1, 0, 12, 34, 567890) - - -def test__datetime_field__from_model(): - # given - model = Mock(attributeType="datetime", attributeName="some/datetime", value="2024-01-01T00:12:34.567890Z") - - # when - result = DateTimeField.from_model(model) - - # then - assert result.path == "some/datetime" - assert result.value == datetime.datetime(2024, 1, 1, 0, 12, 34, 567890) - - -def test__datetime_field__from_proto(): - # given - at = datetime.datetime(2024, 1, 1, 0, 12, 34, 123000, tzinfo=datetime.timezone.utc) - - proto = ProtoDatetimeAttributeDTO( - attribute_name="some/datetime", attribute_type="datetime", value=int(at.timestamp() * 1000) - ) - - # when - result = DateTimeField.from_proto(proto) - - # then - assert result.path == "some/datetime" - assert result.value == at - - -def test__float_series_field__from_dict(): - # given - data = { - "attributeType": "floatSeries", - "attributeName": 
"some/floatSeries", - "last": 19.5, - } - - # when - result = FloatSeriesField.from_dict(data) - - # then - assert result.path == "some/floatSeries" - assert result.last == 19.5 - - -def test__float_series_field__from_dict__no_last(): - # given - data = { - "attributeType": "floatSeries", - "attributeName": "some/floatSeries", - } - - # when - result = FloatSeriesField.from_dict(data) - - # then - assert result.path == "some/floatSeries" - assert result.last is None - - -def test__float_series_field__from_model(): - # given - model = Mock( - attributeType="floatSeries", - attributeName="some/floatSeries", - last=19.5, - ) - - # when - result = FloatSeriesField.from_model(model) - - # then - assert result.path == "some/floatSeries" - assert result.last == 19.5 - - -def test__float_series_field__from_model__no_last(): - # given - model = Mock( - attributeType="floatSeries", - attributeName="some/floatSeries", - last=None, - ) - - # when - result = FloatSeriesField.from_model(model) - - # then - assert result.path == "some/floatSeries" - assert result.last is None - - -def test__float_series_field__from_proto(): - # given - proto = ProtoFloatSeriesAttributeDTO( - attribute_name="some/floatSeries", - attribute_type="floatSeries", - last=19.5, - ) - - # when - result = FloatSeriesField.from_proto(proto) - - # then - assert result.path == "some/floatSeries" - assert result.last == 19.5 - - -def test__float_series_field__from_proto__no_last(): - # given - proto = ProtoFloatSeriesAttributeDTO( - attribute_name="some/floatSeries", - attribute_type="floatSeries", - last=None, - ) - - # when - result = FloatSeriesField.from_proto(proto) - - # then - assert result.path == "some/floatSeries" - assert result.last is None - - -def test__string_series_field__from_dict(): - # given - data = { - "attributeType": "stringSeries", - "attributeName": "some/stringSeries", - "last": "hello", - } - - # when - result = StringSeriesField.from_dict(data) - - # then - assert result.path == "some/stringSeries" - assert result.last == "hello" - - -def test__string_series_field__from_dict__no_last(): - # given - data = { - "attributeType": "stringSeries", - "attributeName": "some/stringSeries", - } - - # when - result = StringSeriesField.from_dict(data) - - # then - assert result.path == "some/stringSeries" - assert result.last is None - - -def test__string_series_field__from_model(): - # given - model = Mock( - attributeType="stringSeries", - attributeName="some/stringSeries", - last="hello", - ) - - # when - result = StringSeriesField.from_model(model) - - # then - assert result.path == "some/stringSeries" - assert result.last == "hello" - - -def test__string_series_field__from_model__no_last(): - # given - model = Mock( - attributeType="stringSeries", - attributeName="some/stringSeries", - last=None, - ) - - # when - result = StringSeriesField.from_model(model) - - # then - assert result.path == "some/stringSeries" - assert result.last is None - - -def test__string_series_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - StringSeriesField.from_proto(proto) - - -def test__image_series_field__from_dict(): - # given - data = { - "attributeType": "imageSeries", - "attributeName": "some/imageSeries", - "lastStep": 15.0, - } - - # when - result = ImageSeriesField.from_dict(data) - - # then - assert result.path == "some/imageSeries" - assert result.last_step == 15.0 - - -def test__image_series_field__from_dict__no_last_step(): - # given - data = { - "attributeType": 
"imageSeries", - "attributeName": "some/imageSeries", - } - - # when - result = ImageSeriesField.from_dict(data) - - # then - assert result.path == "some/imageSeries" - assert result.last_step is None - - -def test__image_series_field__from_model(): - # given - model = Mock( - attributeType="imageSeries", - attributeName="some/imageSeries", - lastStep=15.0, - ) - - # when - result = ImageSeriesField.from_model(model) - - # then - assert result.path == "some/imageSeries" - assert result.last_step == 15.0 - - -def test__image_series_field__from_model__no_last_step(): - # given - model = Mock( - attributeType="imageSeries", - attributeName="some/imageSeries", - lastStep=None, - ) - - # when - result = ImageSeriesField.from_model(model) - - # then - assert result.path == "some/imageSeries" - assert result.last_step is None - - -def test__image_series_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - ImageSeriesField.from_proto(proto) - - -def test__string_set_field__from_dict(): - # given - data = { - "attributeType": "stringSet", - "attributeName": "some/stringSet", - "values": ["hello", "world"], - } - - # when - result = StringSetField.from_dict(data) - - # then - assert result.path == "some/stringSet" - assert result.values == {"hello", "world"} - - -def test__string_set_field__from_dict__empty(): - # given - data = { - "attributeType": "stringSet", - "attributeName": "some/stringSet", - "values": [], - } - - # when - result = StringSetField.from_dict(data) - - # then - assert result.path == "some/stringSet" - assert result.values == set() - - -def test__string_set_field__from_model(): - # given - model = Mock( - attributeType="stringSet", - attributeName="some/stringSet", - values=["hello", "world"], - ) - - # when - result = StringSetField.from_model(model) - - # then - assert result.path == "some/stringSet" - assert result.values == {"hello", "world"} - - -def test__string_set_field__from_model__empty(): - # given - model = Mock( - attributeType="stringSet", - attributeName="some/stringSet", - values=[], - ) - - # when - result = StringSetField.from_model(model) - - # then - assert result.path == "some/stringSet" - assert result.values == set() - - -def test__string_set_field__from_proto(): - # given - proto = ProtoStringSetAttributeDTO( - attribute_name="some/stringSet", - attribute_type="stringSet", - value=["hello", "world"], - ) - - # when - result = StringSetField.from_proto(proto) - - # then - assert result.path == "some/stringSet" - assert result.values == {"hello", "world"} - - -def test__string_set_field__from_proto__empty(): - # given - proto = ProtoStringSetAttributeDTO( - attribute_name="some/stringSet", - attribute_type="stringSet", - value=[], - ) - - # when - result = StringSetField.from_proto(proto) - - # then - assert result.path == "some/stringSet" - assert result.values == set() - - -def test__file_field__from_dict(): - # given - data = { - "attributeType": "file", - "attributeName": "some/file", - "name": "file.txt", - "size": 1024, - "ext": "txt", - } - - # when - result = FileField.from_dict(data) - - # then - assert result.path == "some/file" - assert result.name == "file.txt" - assert result.size == 1024 - assert result.ext == "txt" - - -def test__file_field__from_model(): - # given - model = Mock( - attributeType="file", - attributeName="some/file", - size=1024, - ext="txt", - ) - model.name = "file.txt" - - # when - result = FileField.from_model(model) - - # then - assert result.path == "some/file" - assert 
result.name == "file.txt" - assert result.size == 1024 - assert result.ext == "txt" - - -def test__file_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - FileField.from_proto(proto) - - -@pytest.mark.parametrize("state,expected", [("running", "Active"), ("idle", "Inactive")]) -def test__object_state_field__from_dict(state, expected): - # given - data = {"attributeType": "experimentState", "attributeName": "sys/state", "value": state} - - # when - result = ObjectStateField.from_dict(data) - - # then - assert result.path == "sys/state" - assert result.value == expected - - -@pytest.mark.parametrize("state,expected", [("running", "Active"), ("idle", "Inactive")]) -def test__object_state_field__from_model(state, expected): - # given - model = Mock(attributeType="experimentState", attributeName="sys/state", value=state) - - # when - result = ObjectStateField.from_model(model) - - # then - assert result.path == "sys/state" - assert result.value == expected - - -@pytest.mark.parametrize("state,expected", [("running", "Active"), ("idle", "Inactive")]) -def test__object_state_field__from_proto(state, expected): - # given - model = Mock() - - # then - with pytest.raises(NotImplementedError): - ObjectStateField.from_proto(model) - - -def test__file_set_field__from_dict(): - # given - data = { - "attributeType": "fileSet", - "attributeName": "some/fileSet", - "size": 3072, - } - - # when - result = FileSetField.from_dict(data) - - # then - assert result.path == "some/fileSet" - assert result.size == 3072 - - -def test__file_set_field__from_model(): - # given - model = Mock( - attributeType="fileSet", - attributeName="some/fileSet", - size=3072, - ) - - # when - result = FileSetField.from_model(model) - - # then - assert result.path == "some/fileSet" - assert result.size == 3072 - - -def test__file_set_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - FileSetField.from_proto(proto) - - -def test__notebook_ref_field__from_dict(): - # given - data = { - "attributeType": "notebookRef", - "attributeName": "some/notebookRef", - "notebookName": "Data Processing.ipynb", - } - - # when - result = NotebookRefField.from_dict(data) - - # then - assert result.path == "some/notebookRef" - assert result.notebook_name == "Data Processing.ipynb" - - -def test__notebook_ref_field__from_dict__no_notebook_name(): - # given - data = { - "attributeType": "notebookRef", - "attributeName": "some/notebookRef", - } - - # when - result = NotebookRefField.from_dict(data) - - # then - assert result.path == "some/notebookRef" - assert result.notebook_name is None - - -def test__notebook_ref_field__from_model(): - # given - model = Mock( - attributeType="notebookRef", - attributeName="some/notebookRef", - notebookName="Data Processing.ipynb", - ) - - # when - result = NotebookRefField.from_model(model) - - # then - assert result.path == "some/notebookRef" - assert result.notebook_name == "Data Processing.ipynb" - - -def test__notebook_ref_field__from_model__no_notebook_name(): - # given - model = Mock( - attributeType="notebookRef", - attributeName="some/notebookRef", - notebookName=None, - ) - - # when - result = NotebookRefField.from_model(model) - - # then - assert result.path == "some/notebookRef" - assert result.notebook_name is None - - -def test__notebook_ref_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - NotebookRefField.from_proto(proto) - - -def 
test__notebook_ref_field__from_proto__no_notebook_name(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - NotebookRefField.from_proto(proto) - - -def test__git_ref_field__from_dict(): - # given - data = { - "attributeType": "gitRef", - "attributeName": "some/gitRef", - "commit": { - "commitId": "b2d7f8a", - }, - } - - # when - result = GitRefField.from_dict(data) - - # then - assert result.path == "some/gitRef" - assert result.commit.commit_id == "b2d7f8a" - - -def test__git_ref_field__from_dict__no_commit(): - # given - data = { - "attributeType": "gitRef", - "attributeName": "some/gitRef", - } - - # when - result = GitRefField.from_dict(data) - - # then - assert result.path == "some/gitRef" - assert result.commit is None - - -def test__git_ref_field__from_model(): - # given - model = Mock( - attributeType="gitRef", - attributeName="some/gitRef", - commit=Mock( - commitId="b2d7f8a", - ), - ) - - # when - result = GitRefField.from_model(model) - - # then - assert result.path == "some/gitRef" - assert result.commit.commit_id == "b2d7f8a" - - -def test__git_ref_field__from_model__no_commit(): - # given - model = Mock( - attributeType="gitRef", - attributeName="some/gitRef", - commit=None, - ) - - # when - result = GitRefField.from_model(model) - - # then - assert result.path == "some/gitRef" - assert result.commit is None - - -def test__git_ref_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - GitRefField.from_proto(proto) - - -def test__git_ref_field__from_proto__no_commit(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - GitRefField.from_proto(proto) - - -def test__artifact_field__from_dict(): - # given - data = { - "attributeType": "artifact", - "attributeName": "some/artifact", - "hash": "f192cddb2b98c0b4c72bba22b68d2245", - } - - # when - result = ArtifactField.from_dict(data) - - # then - assert result.path == "some/artifact" - assert result.hash == "f192cddb2b98c0b4c72bba22b68d2245" - - -def test__artifact_field__from_model(): - # given - model = Mock( - attributeType="artifact", - attributeName="some/artifact", - hash="f192cddb2b98c0b4c72bba22b68d2245", - ) - - # when - result = ArtifactField.from_model(model) - - # then - assert result.path == "some/artifact" - assert result.hash == "f192cddb2b98c0b4c72bba22b68d2245" - - -def test__artifact_field__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - ArtifactField.from_proto(proto) - - -def test__field__from_dict__float(): - # given - data = { - "path": "some/float", - "type": "float", - "floatProperties": {"attributeType": "float", "attributeName": "some/float", "value": 18.5}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/float" - assert isinstance(result, FloatField) - assert result.value == 18.5 - - -def test__field__from_model__float(): - # given - model = Mock( - path="some/float", - type="float", - floatProperties=Mock(attributeType="float", attributeName="some/float", value=18.5), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/float" - assert isinstance(result, FloatField) - assert result.value == 18.5 - - -def test__field__from_proto__float(): - # given - proto = ProtoAttributeDTO( - name="some/float", - type="float", - float_properties=ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ), - ) - - # when - result = 
Field.from_proto(proto) - - # then - assert result.path == "some/float" - assert isinstance(result, FloatField) - assert result.value == 18.5 - - -def test__field__from_dict__int(): - # given - data = { - "path": "some/int", - "type": "int", - "intProperties": {"attributeType": "int", "attributeName": "some/int", "value": 18}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/int" - assert isinstance(result, IntField) - assert result.value == 18 - - -def test__field__from_model__int(): - # given - model = Mock( - path="some/int", type="int", intProperties=Mock(attributeType="int", attributeName="some/int", value=18) - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/int" - assert isinstance(result, IntField) - assert result.value == 18 - - -def test__field__from_proto__int(): - # given - proto = ProtoAttributeDTO( - name="some/int", - type="int", - int_properties=ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/int" - assert isinstance(result, IntField) - assert result.value == 18 - - -def test__field__from_dict__string(): - # given - data = { - "path": "some/string", - "type": "string", - "stringProperties": {"attributeType": "string", "attributeName": "some/string", "value": "hello"}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/string" - assert isinstance(result, StringField) - assert result.value == "hello" - - -def test__field__from_model__string(): - # given - model = Mock( - path="some/string", - type="string", - stringProperties=Mock(attributeType="string", attributeName="some/string", value="hello"), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/string" - assert isinstance(result, StringField) - assert result.value == "hello" - - -def test__field__from_proto__string(): - # given - proto = ProtoAttributeDTO( - name="some/string", - type="string", - string_properties=ProtoStringAttributeDTO( - attribute_name="some/string", - attribute_type="string", - value="hello", - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/string" - assert isinstance(result, StringField) - assert result.value == "hello" - - -def test__field__from_dict__bool(): - # given - data = { - "path": "some/bool", - "type": "bool", - "boolProperties": {"attributeType": "bool", "attributeName": "some/bool", "value": True}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/bool" - assert isinstance(result, BoolField) - assert result.value is True - - -def test__field__from_model__bool(): - # given - model = Mock( - path="some/bool", type="bool", boolProperties=Mock(attributeType="bool", attributeName="some/bool", value=True) - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/bool" - assert isinstance(result, BoolField) - assert result.value is True - - -def test__field__from_proto__bool(): - # given - proto = ProtoAttributeDTO( - name="some/bool", - type="bool", - bool_properties=ProtoBoolAttributeDTO( - attribute_name="some/bool", - attribute_type="bool", - value=True, - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/bool" - assert isinstance(result, BoolField) - assert result.value is True - - -def test__field__from_dict__datetime(): - # given - data = 
{ - "path": "some/datetime", - "type": "datetime", - "datetimeProperties": { - "attributeType": "datetime", - "attributeName": "some/datetime", - "value": "2024-01-01T00:12:34.567890Z", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/datetime" - assert isinstance(result, DateTimeField) - assert result.value == datetime.datetime(2024, 1, 1, 0, 12, 34, 567890) - - -def test__field__from_model__datetime(): - # given - model = Mock( - path="some/datetime", - type="datetime", - datetimeProperties=Mock( - attributeType="datetime", attributeName="some/datetime", value="2024-01-01T00:12:34.567890Z" - ), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/datetime" - assert isinstance(result, DateTimeField) - assert result.value == datetime.datetime(2024, 1, 1, 0, 12, 34, 567890) - - -def test__field__from_proto__datetime(): - # given - at = datetime.datetime(2021, 1, 1, 0, 12, 34, 123000, tzinfo=datetime.timezone.utc) - - # and - proto = ProtoAttributeDTO( - name="some/datetime", - type="datetime", - datetime_properties=ProtoDatetimeAttributeDTO( - attribute_name="some/datetime", - attribute_type="datetime", - value=int(at.timestamp() * 1000), - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/datetime" - assert isinstance(result, DateTimeField) - assert result.value == at - - -def test__field__from_dict__float_series(): - # given - data = { - "path": "some/floatSeries", - "type": "floatSeries", - "floatSeriesProperties": {"attributeType": "floatSeries", "attributeName": "some/floatSeries", "last": 19.5}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/floatSeries" - assert isinstance(result, FloatSeriesField) - assert result.last == 19.5 - - -def test__field__from_model__float_series(): - # given - model = Mock( - path="some/floatSeries", - type="floatSeries", - floatSeriesProperties=Mock(attributeType="floatSeries", attributeName="some/floatSeries", last=19.5), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/floatSeries" - assert isinstance(result, FloatSeriesField) - assert result.last == 19.5 - - -def test__field__from_proto__float_series(): - # given - proto = ProtoAttributeDTO( - name="some/floatSeries", - type="floatSeries", - float_series_properties=ProtoFloatSeriesAttributeDTO( - attribute_name="some/floatSeries", - attribute_type="floatSeries", - last=19.5, - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/floatSeries" - assert isinstance(result, FloatSeriesField) - assert result.last == 19.5 - - -def test__field__from_dict__string_series(): - # given - data = { - "path": "some/stringSeries", - "type": "stringSeries", - "stringSeriesProperties": { - "attributeType": "stringSeries", - "attributeName": "some/stringSeries", - "last": "hello", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/stringSeries" - assert isinstance(result, StringSeriesField) - assert result.last == "hello" - - -def test__field__from_model__string_series(): - # given - model = Mock( - path="some/stringSeries", - type="stringSeries", - stringSeriesProperties=Mock(attributeType="stringSeries", attributeName="some/stringSeries", last="hello"), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/stringSeries" - assert isinstance(result, StringSeriesField) - assert result.last == 
"hello" - - -def test__field__from_proto__string_series(): - # given - proto = Mock(name="some/stringSeries", type="stringSeries", string_series_properties=Mock()) - - # when - with pytest.raises(NotImplementedError): - Field.from_proto(proto) - - -def test__field__from_dict__image_series(): - # given - data = { - "path": "some/imageSeries", - "type": "imageSeries", - "imageSeriesProperties": { - "attributeType": "imageSeries", - "attributeName": "some/imageSeries", - "lastStep": 15.0, - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/imageSeries" - assert isinstance(result, ImageSeriesField) - assert result.last_step == 15.0 - - -def test__field__from_model__image_series(): - # given - model = Mock( - path="some/imageSeries", - type="imageSeries", - imageSeriesProperties=Mock(attributeType="imageSeries", attributeName="some/imageSeries", lastStep=15.0), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/imageSeries" - assert isinstance(result, ImageSeriesField) - assert result.last_step == 15.0 - - -def test__field__from_proto__image_series(): - # given - proto = Mock(name="some/imageSeries", type="imageSeries", image_series_properties=Mock()) - - # when - with pytest.raises(NotImplementedError): - Field.from_proto(proto) - - -def test__field__from_dict__string_set(): - # given - data = { - "path": "some/stringSet", - "type": "stringSet", - "stringSetProperties": { - "attributeType": "stringSet", - "attributeName": "some/stringSet", - "values": ["hello", "world"], - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/stringSet" - assert isinstance(result, StringSetField) - assert result.values == {"hello", "world"} - - -def test__field__from_model__string_set(): - # given - model = Mock( - path="some/stringSet", - type="stringSet", - stringSetProperties=Mock(attributeType="stringSet", attributeName="some/stringSet", values=["hello", "world"]), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/stringSet" - assert isinstance(result, StringSetField) - assert result.values == {"hello", "world"} - - -def test__field__from_proto__string_set(): - # given - proto = ProtoAttributeDTO( - name="some/stringSet", - type="stringSet", - string_set_properties=ProtoStringSetAttributeDTO( - attribute_name="some/stringSet", - attribute_type="stringSet", - value=["hello", "world"], - ), - ) - - # when - result = Field.from_proto(proto) - - # then - assert result.path == "some/stringSet" - assert isinstance(result, StringSetField) - assert result.values == {"hello", "world"} - - -def test__field__from_dict__file(): - # given - data = { - "path": "some/file", - "type": "file", - "fileProperties": { - "attributeType": "file", - "attributeName": "some/file", - "name": "file.txt", - "size": 1024, - "ext": "txt", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/file" - assert isinstance(result, FileField) - assert result.name == "file.txt" - assert result.size == 1024 - assert result.ext == "txt" - - -def test__field__from_model__file(): - # given - model = Mock( - path="some/file", - type="file", - fileProperties=Mock(attributeType="file", attributeName="some/file", size=1024, ext="txt"), - ) - model.fileProperties.name = "file.txt" - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/file" - assert isinstance(result, FileField) - assert result.name == "file.txt" - assert 
result.size == 1024 - assert result.ext == "txt" - - -def test__field__from_proto__file(): - # given - proto = Mock(name="some/file", type="file", file_properties=Mock()) - - # then - with pytest.raises(NotImplementedError): - FileField.from_proto(proto) - - -def test__field__from_dict__object_state(): - # given - data = { - "path": "sys/state", - "type": "experimentState", - "experimentStateProperties": { - "attributeType": "experimentState", - "attributeName": "sys/state", - "value": "running", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "sys/state" - assert isinstance(result, ObjectStateField) - assert result.value == "Active" - - -def test__field__from_model__object_state(): - # given - model = Mock( - path="sys/state", - type="experimentState", - experimentStateProperties=Mock(attributeType="experimentState", attributeName="sys/state", value="running"), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "sys/state" - assert isinstance(result, ObjectStateField) - assert result.value == "Active" - - -def test__field__from_proto__object_state(): - # given - proto = Mock(name="sys/state", type="experimentState", experiment_state_properties=Mock()) - - # when - with pytest.raises(NotImplementedError): - Field.from_proto(proto) - - -def test__field__from_dict__file_set(): - # given - data = { - "path": "some/fileSet", - "type": "fileSet", - "fileSetProperties": {"attributeType": "fileSet", "attributeName": "some/fileSet", "size": 3072}, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/fileSet" - assert isinstance(result, FileSetField) - assert result.size == 3072 - - -def test__field__from_model__file_set(): - # given - model = Mock( - path="some/fileSet", - type="fileSet", - fileSetProperties=Mock(attributeType="fileSet", attributeName="some/fileSet", size=3072), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/fileSet" - assert isinstance(result, FileSetField) - assert result.size == 3072 - - -def test__field__from_proto__file_set(): - # given - proto = Mock(name="some/fileSet", type="fileSet", file_set_properties=Mock()) - - # then - with pytest.raises(NotImplementedError): - FileSetField.from_proto(proto) - - -def test__field__from_dict__notebook_ref(): - # given - data = { - "path": "some/notebookRef", - "type": "notebookRef", - "notebookRefProperties": { - "attributeType": "notebookRef", - "attributeName": "some/notebookRef", - "notebookName": "Data Processing.ipynb", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/notebookRef" - assert isinstance(result, NotebookRefField) - assert result.notebook_name == "Data Processing.ipynb" - - -def test__field__from_model__notebook_ref(): - # given - model = Mock( - path="some/notebookRef", - type="notebookRef", - notebookRefProperties=Mock( - attributeType="notebookRef", attributeName="some/notebookRef", notebookName="Data Processing.ipynb" - ), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/notebookRef" - assert isinstance(result, NotebookRefField) - assert result.notebook_name == "Data Processing.ipynb" - - -def test__field__from_proto__notebook_ref(): - # given - proto = Mock(name="some/notebookRef", type="notebookRef", notebook_ref_properties=Mock()) - - # then - with pytest.raises(NotImplementedError): - NotebookRefField.from_proto(proto) - - -def test__field__from_dict__git_ref(): - # given - 
data = { - "path": "some/gitRef", - "type": "gitRef", - "gitRefProperties": { - "attributeType": "gitRef", - "attributeName": "some/gitRef", - "commit": {"commitId": "b2d7f8a"}, - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/gitRef" - assert isinstance(result, GitRefField) - assert result.commit.commit_id == "b2d7f8a" - - -def test__field__from_model__git_ref(): - # given - model = Mock( - path="some/gitRef", - type="gitRef", - gitRefProperties=Mock(attributeType="gitRef", attributeName="some/gitRef", commit=Mock(commitId="b2d7f8a")), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/gitRef" - assert isinstance(result, GitRefField) - assert result.commit.commit_id == "b2d7f8a" - - -def test__field__from_proto__git_ref(): - # given - proto = Mock(name="some/gitRef", type="gitRef", git_ref_properties=Mock()) - - # then - with pytest.raises(NotImplementedError): - GitRefField.from_proto(proto) - - -def test__field__from_dict__artifact(): - # given - data = { - "path": "some/artifact", - "type": "artifact", - "artifactProperties": { - "attributeType": "artifact", - "attributeName": "some/artifact", - "hash": "f192cddb2b98c0b4c72bba22b68d2245", - }, - } - - # when - result = Field.from_dict(data) - - # then - assert result.path == "some/artifact" - assert isinstance(result, ArtifactField) - assert result.hash == "f192cddb2b98c0b4c72bba22b68d2245" - - -def test__field__from_model__artifact(): - # given - model = Mock( - path="some/artifact", - type="artifact", - artifactProperties=Mock( - attributeType="artifact", attributeName="some/artifact", hash="f192cddb2b98c0b4c72bba22b68d2245" - ), - ) - - # when - result = Field.from_model(model) - - # then - assert result.path == "some/artifact" - assert isinstance(result, ArtifactField) - assert result.hash == "f192cddb2b98c0b4c72bba22b68d2245" - - -def test__field__from_proto__artifact(): - # given - proto = Mock(name="some/artifact", type="artifact", artifact_properties=Mock()) - - # then - with pytest.raises(NotImplementedError): - ArtifactField.from_proto(proto) - - -def test__field_definition__from_dict(): - # given - data = { - "name": "some/float", - "type": "float", - } - - # when - result = FieldDefinition.from_dict(data) - - # then - assert result.path == "some/float" - assert result.type == FieldType.FLOAT - - -def test__field_definition__from_model(): - # given - model = Mock( - type="float", - ) - model.name = "some/float" - - # when - result = FieldDefinition.from_model(model) - - # then - assert result.path == "some/float" - assert result.type == FieldType.FLOAT - - -def test__field_definition__from_proto(): - # given - proto = ProtoAttributeDTO( - name="some/float", - type="float", - ) - - # when - result = FieldDefinition.from_proto(proto) - - # then - assert result.path == "some/float" - assert result.type == FieldType.FLOAT - - -def test__leaderboard_entry__from_dict(): - # given - data = { - "experimentId": "some-id", - "attributes": [ - { - "path": "some/float", - "type": "float", - "floatProperties": {"attributeType": "float", "attributeName": "some/float", "value": 18.5}, - }, - { - "path": "some/int", - "type": "int", - "intProperties": {"attributeType": "int", "attributeName": "some/int", "value": 18}, - }, - { - "path": "some/string", - "type": "string", - "stringProperties": {"attributeType": "string", "attributeName": "some/string", "value": "hello"}, - }, - ], - } - - # when - result = LeaderboardEntry.from_dict(data) - - # then - assert 
result.object_id == "some-id" - assert len(result.fields) == 3 - - float_field = result.fields[0] - assert isinstance(float_field, FloatField) - assert float_field.path == "some/float" - assert float_field.value == 18.5 - - int_field = result.fields[1] - assert isinstance(int_field, IntField) - assert int_field.path == "some/int" - - string_field = result.fields[2] - assert isinstance(string_field, StringField) - assert string_field.path == "some/string" - - -def test__leaderboard_entry__from_model(): - # given - model = Mock( - experimentId="some-id", - attributes=[ - Mock( - path="some/float", - type="float", - floatProperties=Mock(attributeType="float", attributeName="some/float", value=18.5), - ), - Mock( - path="some/int", type="int", intProperties=Mock(attributeType="int", attributeName="some/int", value=18) - ), - Mock( - path="some/string", - type="string", - stringProperties=Mock(attributeType="string", attributeName="some/string", value="hello"), - ), - ], - ) - - # when - result = LeaderboardEntry.from_model(model) - - # then - assert result.object_id == "some-id" - assert len(result.fields) == 3 - - float_field = result.fields[0] - assert isinstance(float_field, FloatField) - assert float_field.path == "some/float" - assert float_field.value == 18.5 - - int_field = result.fields[1] - assert isinstance(int_field, IntField) - assert int_field.path == "some/int" - - string_field = result.fields[2] - assert isinstance(string_field, StringField) - assert string_field.path == "some/string" - - -def test__leaderboard_entry__from_proto(): - # given - proto = ProtoAttributesDTO( - experiment_id="some-id", - attributes=[ - ProtoAttributeDTO( - name="some/float", - type="float", - float_properties=ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ), - ), - ProtoAttributeDTO( - name="some/int", - type="int", - int_properties=ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ), - ), - ProtoAttributeDTO( - name="some/string", - type="string", - string_properties=ProtoStringAttributeDTO( - attribute_name="some/string", - attribute_type="string", - value="hello", - ), - ), - ], - ) - - # when - result = LeaderboardEntry.from_proto(proto) - - # then - assert result.object_id == "some-id" - assert len(result.fields) == 3 - - float_field = result.fields[0] - assert isinstance(float_field, FloatField) - assert float_field.path == "some/float" - assert float_field.value == 18.5 - - int_field = result.fields[1] - assert isinstance(int_field, IntField) - assert int_field.path == "some/int" - - string_field = result.fields[2] - assert isinstance(string_field, StringField) - assert string_field.path == "some/string" - - -def test__leaderboard_entries_search_result__from_dict(): - # given - data = { - "matchingItemCount": 2, - "entries": [ - { - "experimentId": "some-id-1", - "attributes": [ - { - "path": "some/float", - "type": "float", - "floatProperties": {"attributeType": "float", "attributeName": "some/float", "value": 18.5}, - }, - ], - }, - { - "experimentId": "some-id-2", - "attributes": [ - { - "path": "some/int", - "type": "int", - "intProperties": {"attributeType": "int", "attributeName": "some/int", "value": 18}, - }, - ], - }, - ], - } - - # when - result = LeaderboardEntriesSearchResult.from_dict(data) - - # then - assert result.matching_item_count == 2 - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert len(entry_1.fields) == 1 - assert 
isinstance(entry_1.fields[0], FloatField) - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert len(entry_2.fields) == 1 - assert isinstance(entry_2.fields[0], IntField) - - -def test__leaderboard_entries_search_result__from_model(): - # given - model = Mock( - matchingItemCount=2, - entries=[ - Mock( - experimentId="some-id-1", - attributes=[ - Mock( - path="some/float", - type="float", - floatProperties=Mock(attributeType="float", attributeName="some/float", value=18.5), - ), - ], - ), - Mock( - experimentId="some-id-2", - attributes=[ - Mock( - path="some/int", - type="int", - intProperties=Mock(attributeType="int", attributeName="some/int", value=18), - ), - ], - ), - ], - ) - - # when - result = LeaderboardEntriesSearchResult.from_model(model) - - # then - assert result.matching_item_count == 2 - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert len(entry_1.fields) == 1 - assert isinstance(entry_1.fields[0], FloatField) - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert len(entry_2.fields) == 1 - assert isinstance(entry_2.fields[0], IntField) - - -def test__leaderboard_entries_search_result__from_proto(): - # given - proto = ProtoLeaderboardEntriesSearchResultDTO( - matching_item_count=2, - entries=[ - ProtoAttributesDTO( - experiment_id="some-id-1", - attributes=[ - ProtoAttributeDTO( - name="some/float", - type="float", - float_properties=ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ), - ), - ], - ), - ProtoAttributesDTO( - experiment_id="some-id-2", - attributes=[ - ProtoAttributeDTO( - name="some/int", - type="int", - int_properties=ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ), - ), - ], - ), - ], - ) - - # when - result = LeaderboardEntriesSearchResult.from_proto(proto) - - # then - assert result.matching_item_count == 2 - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert len(entry_1.fields) == 1 - assert isinstance(entry_1.fields[0], FloatField) - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert len(entry_2.fields) == 1 - assert isinstance(entry_2.fields[0], IntField) - - -@pytest.mark.parametrize("field_type", list(FieldType)) -def test__all_field_types__have_class(field_type): - # when - field_class = Field.by_type(field_type) - - # then - assert field_class is not None - assert field_class.type == field_type - - -def test__file_entry__from_model(): - # given - now = datetime.datetime.now() - - # and - model = Mock( - size=100, - mtime=now, - fileType="file", - ) - model.name = "mock_name" - - entry = FileEntry.from_dto(model) - - assert entry.name == "mock_name" - assert entry.size == 100 - assert entry.mtime == now - assert entry.file_type == "file" - - -def test__float_series_values__from_dict(): - # given - data = { - "totalItemCount": 3, - "values": [ - {"step": 0.5, "value": 1.0, "timestampMillis": 1612345678901}, - {"step": 2.5, "value": 2.0, "timestampMillis": 1612345679922}, - {"step": 5.0, "value": 3.0, "timestampMillis": 1612345680963}, - ], - } - - # when - result = FloatSeriesValues.from_dict(data) - - # then - assert result.total == 3 - - assert result.values[0].step == 0.5 - assert result.values[0].value == 1.0 - assert result.values[0].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc) - - assert 
result.values[1].step == 2.5 - assert result.values[1].value == 2.0 - assert result.values[1].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 59, 922000, tzinfo=datetime.timezone.utc) - - assert result.values[2].step == 5.0 - assert result.values[2].value == 3.0 - assert result.values[2].timestamp == datetime.datetime(2021, 2, 3, 9, 48, 00, 963000, tzinfo=datetime.timezone.utc) - - -def test__float_series_values__from_model(): - # given - model = Mock( - totalItemCount=3, - values=[ - Mock(step=0.5, value=1.0, timestampMillis=1612345678901), - Mock(step=2.5, value=2.0, timestampMillis=1612345679922), - Mock(step=5.0, value=3.0, timestampMillis=1612345680963), - ], - ) - - # when - result = FloatSeriesValues.from_model(model) - - # then - assert result.total == 3 - - assert result.values[0].step == 0.5 - assert result.values[0].value == 1.0 - assert result.values[0].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc) - - assert result.values[1].step == 2.5 - assert result.values[1].value == 2.0 - assert result.values[1].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 59, 922000, tzinfo=datetime.timezone.utc) - - assert result.values[2].step == 5.0 - assert result.values[2].value == 3.0 - assert result.values[2].timestamp == datetime.datetime(2021, 2, 3, 9, 48, 00, 963000, tzinfo=datetime.timezone.utc) - - -def test__float_series_values__from_proto(): - # given - proto = ProtoFloatSeriesValuesDTO( - total_item_count=3, - values=[ - ProtoFloatPointValueDTO(step=0.5, value=1.0, timestamp_millis=1612345678901), - ProtoFloatPointValueDTO(step=2.5, value=2.0, timestamp_millis=1612345679922), - ProtoFloatPointValueDTO(step=5.0, value=3.0, timestamp_millis=1612345680963), - ], - ) - - # when - result = FloatSeriesValues.from_proto(proto) - - # then - assert result.total == 3 - - assert result.values[0].step == 0.5 - assert result.values[0].value == 1.0 - assert result.values[0].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc) - - assert result.values[1].step == 2.5 - assert result.values[1].value == 2.0 - assert result.values[1].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 59, 922000, tzinfo=datetime.timezone.utc) - - assert result.values[2].step == 5.0 - assert result.values[2].value == 3.0 - assert result.values[2].timestamp == datetime.datetime(2021, 2, 3, 9, 48, 00, 963000, tzinfo=datetime.timezone.utc) - - -def test__string_series_values__from_dict(): - # given - data = { - "totalItemCount": 3, - "values": [ - {"step": 0.5, "value": "hello", "timestampMillis": 1612345678901}, - {"step": 2.5, "value": "world", "timestampMillis": 1612345679922}, - {"step": 5.0, "value": "!", "timestampMillis": 1612345680963}, - ], - } - - # when - result = StringSeriesValues.from_dict(data) - - # then - assert result.total == 3 - - assert result.values[0].step == 0.5 - assert result.values[0].value == "hello" - assert result.values[0].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc) - - assert result.values[1].step == 2.5 - assert result.values[1].value == "world" - assert result.values[1].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 59, 922000, tzinfo=datetime.timezone.utc) - - assert result.values[2].step == 5.0 - assert result.values[2].value == "!" 
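All of the series-values assertions around this point pivot on one conversion: the wire format carries `timestampMillis` as epoch milliseconds, and the parsed model exposes a timezone-aware UTC `datetime`. A minimal sketch of that conversion, assuming only the standard library (the helper name is illustrative, not the library's actual API):

import datetime


def millis_to_utc(timestamp_millis: int) -> datetime.datetime:
    # Epoch milliseconds -> aware UTC datetime, matching the expected
    # values asserted in the surrounding tests.
    return datetime.datetime.fromtimestamp(timestamp_millis / 1000.0, tz=datetime.timezone.utc)


assert millis_to_utc(1612345678901) == datetime.datetime(
    2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc
)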
- assert result.values[2].timestamp == datetime.datetime(2021, 2, 3, 9, 48, 00, 963000, tzinfo=datetime.timezone.utc) - - -def test__string_series_values__from_model(): - # given - model = Mock( - totalItemCount=3, - values=[ - Mock(step=0.5, value="hello", timestampMillis=1612345678901), - Mock(step=2.5, value="world", timestampMillis=1612345679922), - Mock(step=5.0, value="!", timestampMillis=1612345680963), - ], - ) - - # when - result = StringSeriesValues.from_model(model) - - # then - assert result.total == 3 - - assert result.values[0].step == 0.5 - assert result.values[0].value == "hello" - assert result.values[0].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 58, 901000, tzinfo=datetime.timezone.utc) - - assert result.values[1].step == 2.5 - assert result.values[1].value == "world" - assert result.values[1].timestamp == datetime.datetime(2021, 2, 3, 9, 47, 59, 922000, tzinfo=datetime.timezone.utc) - - assert result.values[2].step == 5.0 - assert result.values[2].value == "!" - assert result.values[2].timestamp == datetime.datetime(2021, 2, 3, 9, 48, 00, 963000, tzinfo=datetime.timezone.utc) - - -def test__string_series_values__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - StringSeriesValues.from_proto(proto) - - -def test__image_series_values__from_dict(): - # given - data = { - "totalItemCount": 3, - } - - # when - result = ImageSeriesValues.from_dict(data) - - # then - assert result.total == 3 - - -def test__image_series_values__from_model(): - # given - model = Mock( - totalItemCount=3, - ) - - # when - result = ImageSeriesValues.from_model(model) - - # then - assert result.total == 3 - - -def test__image_series_values__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - ImageSeriesValues.from_proto(proto) - - -def test__next_page__from_dict(): - # given - data = { - "nextPageToken": "some-token", - "limit": 10, - } - - # when - result = NextPage.from_dict(data) - - # then - assert result.next_page_token == "some-token" - assert result.limit == 10 - - -def test__next_page__from_model(): - # given - model = Mock( - nextPageToken="some-token", - limit=10, - ) - - # when - result = NextPage.from_model(model) - - # then - assert result.next_page_token == "some-token" - assert result.limit == 10 - - -def test__next_page__from_proto(): - # given - proto = ProtoNextPageDTO(nextPageToken="some-token", limit=10) - - # when - result = NextPage.from_proto(proto) - - # then - assert result.next_page_token == "some-token" - assert result.limit == 10 - - -def test__query_field_definitions_result__from_dict(): - # given - data = { - "entries": [ - { - "name": "some/float", - "type": "float", - }, - { - "name": "some/int", - "type": "int", - }, - { - "name": "some/string", - "type": "string", - }, - ], - "nextPage": { - "nextPageToken": "some-token", - "limit": 10, - }, - } - - # when - result = QueryFieldDefinitionsResult.from_dict(data) - - # then - assert len(result.entries) == 3 - - assert result.entries[0].path == "some/float" - assert result.entries[0].type == FieldType.FLOAT - - assert result.entries[1].path == "some/int" - assert result.entries[1].type == FieldType.INT - - assert result.entries[2].path == "some/string" - assert result.entries[2].type == FieldType.STRING - - assert result.next_page.next_page_token == "some-token" - assert result.next_page.limit == 10 - - -def test__query_field_definitions_result__from_model(): - # given - model = Mock( - entries=[ - Mock(type="float"), -
Mock(type="int"), - Mock(type="string"), - ], - nextPage=Mock(nextPageToken="some-token", limit=10), - ) - model.entries[0].name = "some/float" - model.entries[1].name = "some/int" - model.entries[2].name = "some/string" - - # when - result = QueryFieldDefinitionsResult.from_model(model) - - # then - assert len(result.entries) == 3 - - assert result.entries[0].path == "some/float" - assert result.entries[0].type == FieldType.FLOAT - - assert result.entries[1].path == "some/int" - assert result.entries[1].type == FieldType.INT - - assert result.entries[2].path == "some/string" - assert result.entries[2].type == FieldType.STRING - - assert result.next_page.next_page_token == "some-token" - assert result.next_page.limit == 10 - - -def test__query_field_definitions_result__from_proto(): - # given - proto = Mock() - - # then - with pytest.raises(NotImplementedError): - QueryFieldDefinitionsResult.from_proto(proto) - - -def test__query_fields_experiment_result__from_dict(): - # given - data = { - "experimentId": "some-id-1", - "experimentShortId": "some-key-1", - "attributes": [ - { - "path": "some/float", - "type": "float", - "floatProperties": {"attributeType": "float", "attributeName": "some/float", "value": 18.5}, - }, - { - "path": "some/int", - "type": "int", - "intProperties": {"attributeType": "int", "attributeName": "some/int", "value": 18}, - }, - ], - } - - # when - result = QueryFieldsExperimentResult.from_dict(data) - - # then - assert result.object_id == "some-id-1" - assert result.object_key == "some-key-1" - - assert len(result.fields) == 2 - - field_1 = result.fields[0] - assert field_1.path == "some/float" - assert field_1.type == FieldType.FLOAT - assert isinstance(field_1, FloatField) - assert field_1.value == 18.5 - - field_2 = result.fields[1] - assert field_2.path == "some/int" - assert field_2.type == FieldType.INT - assert isinstance(field_2, IntField) - assert field_2.value == 18 - - -def test__query_fields_experiment_result__from_model(): - # given - model = Mock( - experimentId="some-id-1", - experimentShortId="some-key-1", - attributes=[ - Mock( - path="some/float", - type="float", - floatProperties=Mock(attributeType="float", attributeName="some/float", value=18.5), - ), - Mock( - path="some/int", - type="int", - intProperties=Mock(attributeType="int", attributeName="some/int", value=18), - ), - ], - ) - - # when - result = QueryFieldsExperimentResult.from_model(model) - - # then - assert result.object_id == "some-id-1" - assert result.object_key == "some-key-1" - - assert len(result.fields) == 2 - - field_1 = result.fields[0] - assert field_1.path == "some/float" - assert field_1.type == FieldType.FLOAT - assert isinstance(field_1, FloatField) - assert field_1.value == 18.5 - - field_2 = result.fields[1] - assert field_2.path == "some/int" - assert field_2.type == FieldType.INT - assert isinstance(field_2, IntField) - assert field_2.value == 18 - - -def test__query_fields_experiment_result__from_proto(): - # given - proto = ProtoQueryAttributesExperimentResultDTO( - experimentId="some-id-1", - experimentShortId="some-key-1", - attributes=[ - ProtoAttributeDTO( - name="some/float", - type="float", - float_properties=ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ), - ), - ProtoAttributeDTO( - name="some/int", - type="int", - int_properties=ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ), - ), - ], - ) - - # when - result = QueryFieldsExperimentResult.from_proto(proto) - - # 
then - assert result.object_id == "some-id-1" - assert result.object_key == "some-key-1" - - assert len(result.fields) == 2 - - field_1 = result.fields[0] - assert field_1.path == "some/float" - assert field_1.type == FieldType.FLOAT - assert isinstance(field_1, FloatField) - assert field_1.value == 18.5 - - field_2 = result.fields[1] - assert field_2.path == "some/int" - assert field_2.type == FieldType.INT - assert isinstance(field_2, IntField) - assert field_2.value == 18 - - -def test__query_fields_result__from_dict(): - # given - data = { - "entries": [ - { - "experimentId": "some-id-1", - "experimentShortId": "some-key-1", - "attributes": [ - { - "path": "some/float", - "type": "float", - "floatProperties": {"attributeType": "float", "attributeName": "some/float", "value": 18.5}, - }, - { - "path": "some/int", - "type": "int", - "intProperties": {"attributeType": "int", "attributeName": "some/int", "value": 18}, - }, - ], - }, - { - "experimentId": "some-id-2", - "experimentShortId": "some-key-2", - "attributes": [ - { - "path": "some/string", - "type": "string", - "stringProperties": { - "attributeType": "string", - "attributeName": "some/string", - "value": "hello", - }, - }, - ], - }, - ], - "nextPage": { - "nextPageToken": "some-token", - "limit": 2, - }, - } - - # when - result = QueryFieldsResult.from_dict(data) - - # then - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert entry_1.object_key == "some-key-1" - assert len(entry_1.fields) == 2 - - field_1_1 = entry_1.fields[0] - assert field_1_1.path == "some/float" - assert field_1_1.type == FieldType.FLOAT - assert isinstance(field_1_1, FloatField) - assert field_1_1.value == 18.5 - - field_1_2 = entry_1.fields[1] - assert field_1_2.path == "some/int" - assert field_1_2.type == FieldType.INT - assert isinstance(field_1_2, IntField) - assert field_1_2.value == 18 - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert entry_2.object_key == "some-key-2" - assert len(entry_2.fields) == 1 - - field_2_1 = entry_2.fields[0] - assert field_2_1.path == "some/string" - assert field_2_1.type == FieldType.STRING - assert isinstance(field_2_1, StringField) - assert field_2_1.value == "hello" - - assert result.next_page.next_page_token == "some-token" - assert result.next_page.limit == 2 - - -def test__query_fields_result__from_model(): - # given - model = Mock( - entries=[ - Mock( - experimentId="some-id-1", - experimentShortId="some-key-1", - attributes=[ - Mock( - path="some/float", - type="float", - floatProperties=Mock(attributeType="float", attributeName="some/float", value=18.5), - ), - Mock( - path="some/int", - type="int", - intProperties=Mock(attributeType="int", attributeName="some/int", value=18), - ), - ], - ), - Mock( - experimentId="some-id-2", - experimentShortId="some-key-2", - attributes=[ - Mock( - path="some/string", - type="string", - stringProperties=Mock(attributeType="string", attributeName="some/string", value="hello"), - ), - ], - ), - ], - nextPage=Mock(nextPageToken="some-token", limit=2), - ) - - # when - result = QueryFieldsResult.from_model(model) - - # then - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert entry_1.object_key == "some-key-1" - assert len(entry_1.fields) == 2 - - field_1_1 = entry_1.fields[0] - assert field_1_1.path == "some/float" - assert field_1_1.type == FieldType.FLOAT - assert isinstance(field_1_1, FloatField) - assert 
field_1_1.value == 18.5 - - field_1_2 = entry_1.fields[1] - assert field_1_2.path == "some/int" - assert field_1_2.type == FieldType.INT - assert isinstance(field_1_2, IntField) - assert field_1_2.value == 18 - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert entry_2.object_key == "some-key-2" - assert len(entry_2.fields) == 1 - - field_2_1 = entry_2.fields[0] - assert field_2_1.path == "some/string" - assert field_2_1.type == FieldType.STRING - assert isinstance(field_2_1, StringField) - assert field_2_1.value == "hello" - - assert result.next_page.next_page_token == "some-token" - assert result.next_page.limit == 2 - - -def test__query_fields_result__from_proto(): - # given - proto = ProtoQueryAttributesResultDTO( - entries=[ - ProtoQueryAttributesExperimentResultDTO( - experimentId="some-id-1", - experimentShortId="some-key-1", - attributes=[ - ProtoAttributeDTO( - name="some/float", - type="float", - float_properties=ProtoFloatAttributeDTO( - attribute_name="some/float", - attribute_type="float", - value=18.5, - ), - ), - ProtoAttributeDTO( - name="some/int", - type="int", - int_properties=ProtoIntAttributeDTO( - attribute_name="some/int", - attribute_type="int", - value=18, - ), - ), - ], - ), - ProtoQueryAttributesExperimentResultDTO( - experimentId="some-id-2", - experimentShortId="some-key-2", - attributes=[ - ProtoAttributeDTO( - name="some/string", - type="string", - string_properties=ProtoStringAttributeDTO( - attribute_name="some/string", - attribute_type="string", - value="hello", - ), - ), - ], - ), - ], - nextPage=ProtoNextPageDTO(nextPageToken="some-token", limit=2), - ) - - # when - result = QueryFieldsResult.from_proto(proto) - - # then - assert len(result.entries) == 2 - - entry_1 = result.entries[0] - assert entry_1.object_id == "some-id-1" - assert entry_1.object_key == "some-key-1" - assert len(entry_1.fields) == 2 - - field_1_1 = entry_1.fields[0] - assert field_1_1.path == "some/float" - assert field_1_1.type == FieldType.FLOAT - assert isinstance(field_1_1, FloatField) - assert field_1_1.value == 18.5 - - field_1_2 = entry_1.fields[1] - assert field_1_2.path == "some/int" - assert field_1_2.type == FieldType.INT - assert isinstance(field_1_2, IntField) - assert field_1_2.value == 18 - - entry_2 = result.entries[1] - assert entry_2.object_id == "some-id-2" - assert entry_2.object_key == "some-key-2" - assert len(entry_2.fields) == 1 - - field_2_1 = entry_2.fields[0] - assert field_2_1.path == "some/string" - assert field_2_1.type == FieldType.STRING - assert isinstance(field_2_1, StringField) - assert field_2_1.value == "hello" - - assert result.next_page.next_page_token == "some-token" - assert result.next_page.limit == 2 diff --git a/tests/unit/neptune/new/api/test_pagination.py b/tests/unit/neptune/new/api/test_pagination.py deleted file mode 100644 index b7b35a0be..000000000 --- a/tests/unit/neptune/new/api/test_pagination.py +++ /dev/null @@ -1,152 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - List, -) - -from mock import ( - Mock, - call, -) - -from neptune.api.models import NextPage -from neptune.api.pagination import paginate_over - - -def extract_entries_empty(data: Any) -> List[int]: - return [] - - -def extract_entries(data: Any) -> List[int]: - return [1, 2, 3] - - -def test__empty(): - # given - getter = Mock(side_effect=[Mock(next_page=None)]) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries_empty)) - - # then - assert entries == [] - - -def test__single_page(): - # given - getter = Mock(side_effect=[Mock(next_page=None)]) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries)) - - # then - assert entries == [1, 2, 3] - - -def test__multiple_pages(): - # given - getter = Mock( - side_effect=[ - Mock(next_page=NextPage(next_page_token="aa", limit=None)), - Mock(next_page=NextPage(next_page_token="bb", limit=None)), - Mock(next_page=None), - ] - ) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries)) - - # then - assert entries == [1, 2, 3, 1, 2, 3, 1, 2, 3] - - assert getter.call_count == 3 - assert getter.call_args_list == [ - call(next_page=NextPage(next_page_token=None, limit=50)), - call(next_page=NextPage(next_page_token="aa", limit=50)), - call(next_page=NextPage(next_page_token="bb", limit=50)), - ] - - -def test__kwargs_passed(): - # given - getter = Mock( - side_effect=[ - Mock(next_page=NextPage(next_page_token="aa", limit=None)), - Mock(next_page=NextPage(next_page_token="bb", limit=None)), - Mock(next_page=None), - ] - ) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries, a=1, b=2)) - - # then - assert entries == [1, 2, 3, 1, 2, 3, 1, 2, 3] - - assert getter.call_count == 3 - assert getter.call_args_list == [ - call(a=1, b=2, next_page=NextPage(next_page_token=None, limit=50)), - call(a=1, b=2, next_page=NextPage(next_page_token="aa", limit=50)), - call(a=1, b=2, next_page=NextPage(next_page_token="bb", limit=50)), - ] - - -def test__page_size(): - # given - getter = Mock( - side_effect=[ - Mock(next_page=NextPage(next_page_token="aa", limit=None)), - Mock(next_page=NextPage(next_page_token="bb", limit=None)), - Mock(next_page=None), - ] - ) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries, page_size=10, a=1, b=2)) - - # then - assert entries == [1, 2, 3, 1, 2, 3, 1, 2, 3] - - assert getter.call_count == 3 - assert getter.call_args_list == [ - call(a=1, b=2, next_page=NextPage(next_page_token=None, limit=10)), - call(a=1, b=2, next_page=NextPage(next_page_token="aa", limit=10)), - call(a=1, b=2, next_page=NextPage(next_page_token="bb", limit=10)), - ] - - -def test__limit(): - # given - getter = Mock( - side_effect=[ - Mock(next_page=NextPage(next_page_token="aa", limit=None)), - Mock(next_page=NextPage(next_page_token="bb", limit=None)), - Mock(next_page=None), - ] - ) - - # when - entries = list(paginate_over(getter=getter, extract_entries=extract_entries, page_size=3, limit=5, a=1, b=2)) - - # then - assert entries == [1, 2, 3, 1, 2] - - assert getter.call_count == 2 - assert getter.call_args_list == [ - call(a=1, b=2, next_page=NextPage(next_page_token=None, limit=3)), - call(a=1, b=2, next_page=NextPage(next_page_token="aa", limit=2)), - ] diff --git a/tests/unit/neptune/new/api/test_requests_utils.py 
b/tests/unit/neptune/new/api/test_requests_utils.py deleted file mode 100644 index 7d7c33612..000000000 --- a/tests/unit/neptune/new/api/test_requests_utils.py +++ /dev/null @@ -1,58 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from bravado.requests_client import RequestsResponseAdapter -from requests import Response - -from neptune.api.requests_utils import ensure_json_response - - -class TestResponse(Response): - def __init__(self, content: bytes) -> None: - super().__init__() - self._content = content - - -def test_ensure_json_body__if_empty(): - # given - empty_server_response = RequestsResponseAdapter(TestResponse(content=b"")) - - # when - body = ensure_json_response(empty_server_response) - - # then - assert body == {} - - -def test_ensure_json_body__invalid(): - # given - empty_server_response = RequestsResponseAdapter(TestResponse(content=b"deadbeef")) - - # when - body = ensure_json_response(empty_server_response) - - # then - assert body == {} - - -def test_ensure_json_body__standard(): - # given - empty_server_response = RequestsResponseAdapter(TestResponse(content='{"key": "value"}'.encode("utf-8"))) - - # when - body = ensure_json_response(empty_server_response) - - # then - assert body == {"key": "value"} diff --git a/tests/unit/neptune/new/api/test_searching_entries.py b/tests/unit/neptune/new/api/test_searching_entries.py deleted file mode 100644 index 0fe95bb60..000000000 --- a/tests/unit/neptune/new/api/test_searching_entries.py +++ /dev/null @@ -1,284 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
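The `ensure_json_response` tests above pin down a deliberately forgiving contract: an empty or non-JSON body yields `{}` rather than raising, and a valid JSON body comes back parsed. A short sketch of that contract, assuming only the standard library (the function below is a stand-in, not the actual implementation in `neptune.api.requests_utils`):

import json
from typing import Any, Dict


def ensure_json_sketch(raw_body: bytes) -> Dict[str, Any]:
    # Empty and malformed bodies degrade to an empty dict instead of
    # raising, which is the behavior the three tests above assert.
    if not raw_body:
        return {}
    try:
        return json.loads(raw_body.decode("utf-8"))
    except (UnicodeDecodeError, json.JSONDecodeError):
        return {}


assert ensure_json_sketch(b"") == {}
assert ensure_json_sketch(b"deadbeef") == {}
assert ensure_json_sketch(b'{"key": "value"}') == {"key": "value"}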
-# -from typing import Sequence - -import pytest -from bravado.exception import HTTPBadRequest -from mock import ( - Mock, - call, - patch, -) - -from neptune.api.models import ( - FloatField, - LeaderboardEntriesSearchResult, - LeaderboardEntry, - StringField, -) -from neptune.api.searching_entries import ( - get_single_page, - iter_over_pages, -) -from neptune.exceptions import NeptuneInvalidQueryException -from neptune.internal.backends.nql import RawNQLQuery -from neptune.internal.id_formats import UniqueId - - -def test__to_leaderboard_entry(): - # given - entry = { - "experimentId": "foo", - "attributes": [ - { - "name": "plugh", - "type": "float", - "floatProperties": { - "attributeName": "plugh", - "attributeType": "float", - "value": 1.0, - }, - }, - { - "name": "sys/id", - "type": "string", - "stringProperties": { - "attributeName": "sys/id", - "attributeType": "string", - "value": "TEST-123", - }, - }, - ], - } - - # when - result = LeaderboardEntry.from_dict(entry) - - # then - assert result.object_id == "foo" - assert result.fields == [ - FloatField(path="plugh", value=1.0), - StringField(path="sys/id", value="TEST-123"), - ] - - -@patch("neptune.api.searching_entries.get_single_page") -def test__iter_over_pages__single_pagination(get_single_page_mock): - # given - get_single_page_mock.side_effect = [ - LeaderboardEntriesSearchResult(matching_item_count=9, entries=[]), - generate_leaderboard_entries(values=["a", "b", "c"]), - generate_leaderboard_entries(values=["d", "e", "f"]), - generate_leaderboard_entries(values=["g", "h", "j"]), - generate_leaderboard_entries(values=[]), - ] - - # when - result = list( - iter_over_pages( - step_size=3, - limit=None, - sort_by="sys/id", - sort_by_column_type="string", - ascending=False, - progress_bar=None, - ) - ) - - # then - assert result == generate_leaderboard_entries(values=["a", "b", "c", "d", "e", "f", "g", "h", "j"]).entries - assert get_single_page_mock.mock_calls == [ - # total checking - call(limit=0, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=3, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=6, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=9, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - ] - - -@patch("neptune.api.searching_entries.get_single_page") -def test__iter_over_pages__multiple_search_after(get_single_page_mock): - # given - get_single_page_mock.side_effect = [ - LeaderboardEntriesSearchResult(matching_item_count=9, entries=[]), - generate_leaderboard_entries(values=["a", "b", "c"]), - generate_leaderboard_entries(values=["d", "e", "f"]), - generate_leaderboard_entries(values=["g", "h", "j"]), - generate_leaderboard_entries(values=[]), - ] - - # when - result = list( - iter_over_pages( - step_size=3, - limit=None, - sort_by="sys/id", - sort_by_column_type="string", - ascending=False, - progress_bar=None, - max_offset=6, - ) - ) - - # then - assert result == generate_leaderboard_entries(values=["a", "b", "c", "d", "e", "f", "g", "h", "j"]).entries - assert get_single_page_mock.mock_calls == [ - # total checking - call(limit=0, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=0, 
sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=3, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after="f"), - call(limit=3, offset=3, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after="f"), - ] - - -@patch("neptune.api.searching_entries.get_single_page") -def test__iter_over_pages__empty(get_single_page_mock): - # given - get_single_page_mock.side_effect = [ - LeaderboardEntriesSearchResult(matching_item_count=0, entries=[]), - generate_leaderboard_entries(values=[]), - ] - - # when - result = list( - iter_over_pages( - step_size=3, - limit=None, - sort_by="sys/id", - sort_by_column_type="string", - ascending=False, - progress_bar=None, - ) - ) - - # then - assert result == [] - assert get_single_page_mock.mock_calls == [ - # total checking - call(limit=0, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(limit=3, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - ] - - -@patch("neptune.api.searching_entries.get_single_page") -def test__iter_over_pages__max_server_offset(get_single_page_mock): - # given - get_single_page_mock.side_effect = [ - LeaderboardEntriesSearchResult(matching_item_count=5, entries=[]), - generate_leaderboard_entries(values=["a", "b", "c"]), - generate_leaderboard_entries(values=["d", "e"]), - generate_leaderboard_entries(values=[]), - ] - - # when - result = list( - iter_over_pages( - step_size=3, - limit=None, - sort_by="sys/id", - sort_by_column_type="string", - ascending=False, - progress_bar=None, - max_offset=5, - ) - ) - - # then - assert result == generate_leaderboard_entries(values=["a", "b", "c", "d", "e"]).entries - assert get_single_page_mock.mock_calls == [ - # total checking - call(limit=0, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(offset=0, limit=3, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(offset=3, limit=2, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(offset=0, limit=3, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after="e"), - ] - - -@patch("neptune.api.searching_entries.get_single_page") -def test__iter_over_pages__limit(get_single_page_mock): - # since the limiting itself takes place in an external service, we can't test the results - # we can only test if the limit is properly passed to the external service call - - # given - get_single_page_mock.side_effect = [ - LeaderboardEntriesSearchResult(matching_item_count=5, entries=[]), - generate_leaderboard_entries(values=["a", "b"]), - generate_leaderboard_entries(values=["c", "d"]), - generate_leaderboard_entries(values=["e"]), - generate_leaderboard_entries(values=[]), - ] - - # when - list( - iter_over_pages( - step_size=2, - limit=4, - sort_by="sys/id", - sort_by_column_type="string", - ascending=False, - progress_bar=None, - ) - ) - - # then - assert get_single_page_mock.mock_calls == [ - # total checking - call(limit=0, offset=0, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(offset=0, limit=2, sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - call(offset=2, limit=2, 
sort_by="sys/id", ascending=False, sort_by_column_type="string", searching_after=None), - ] - - -def generate_leaderboard_entries(values: Sequence, experiment_id: str = "foo") -> LeaderboardEntriesSearchResult: - return LeaderboardEntriesSearchResult( - matching_item_count=len(values), - entries=[ - LeaderboardEntry( - object_id=f"{experiment_id}-{value}", - fields=[ - StringField(path="sys/id", value=value), - ], - ) - for value in values - ], - ) - - -def test_get_single_page_error_handling(): - # given - bravado_exception = HTTPBadRequest(response=Mock()) - bravado_exception.response.json.return_value = {"title": "Syntax error"} - - failing_client = Mock() - failing_client.api.searchLeaderboardEntries.side_effect = bravado_exception - - # then - with pytest.raises(NeptuneInvalidQueryException): - get_single_page( - project_id=UniqueId("id"), - attributes_filter={}, - types=None, - query=RawNQLQuery("invalid_query"), - limit=0, - offset=0, - sort_by="sys/id", - ascending=False, - sort_by_column_type="string", - searching_after=None, - client=failing_client, - ) diff --git a/tests/unit/neptune/new/attributes/__init__.py b/tests/unit/neptune/new/attributes/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/attributes/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/attributes/atoms/__init__.py b/tests/unit/neptune/new/attributes/atoms/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/attributes/atoms/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/attributes/atoms/test_artifact.py b/tests/unit/neptune/new/attributes/atoms/test_artifact.py deleted file mode 100644 index 4623f5870..000000000 --- a/tests/unit/neptune/new/attributes/atoms/test_artifact.py +++ /dev/null @@ -1,187 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import pathlib -import tempfile -import uuid -from unittest.mock import Mock - -import pytest -from _pytest.monkeypatch import MonkeyPatch -from mock import ( - MagicMock, - call, - patch, -) - -from neptune import Run -from neptune.attributes.atoms.artifact import Artifact -from neptune.exceptions import ( - NeptuneUnhandledArtifactTypeException, - NeptuneUnsupportedFunctionalityException, -) -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactDriversMap, - ArtifactFileData, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import TrackFilesToArtifact -from neptune.internal.utils.paths import path_to_str -from neptune.types.atoms.artifact import Artifact as ArtifactAttr -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestArtifact(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def setUp(self, get_operation_processor): - self.monkeypatch = MonkeyPatch() - - self.wait = self._random_wait() - self.op_processor = MagicMock() - get_operation_processor.return_value = self.op_processor - self.exp = Run( - mode="debug", - capture_stdout=False, - capture_stderr=False, - capture_traceback=False, - capture_hardware_metrics=False, - ) - self.path = self._random_path() - self.path_str = path_to_str(self.path) - - self.artifact_hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" - self.artifact_files = [ - ArtifactFileData( - file_path="fname.txt", - file_hash="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - type="test", - size=213, - metadata={}, - ), - ArtifactFileData( - file_path="subdir/other.mp3", - file_hash="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - type="test", - metadata={}, - ), - ] - - self.exp.set_attribute(self.path_str, Artifact(self.exp, self.path)) - self.exp._backend._containers[(self.exp._id, ContainerType.RUN)].set( - self.path, ArtifactAttr(self.artifact_hash) - ) - self.exp._backend._artifacts[self.exp._project_id, self.artifact_hash] = self.artifact_files - - self._downloads = set() - - class TestArtifactDriver(ArtifactDriver): - @classmethod - def get_type(cls): - return "test" - - @classmethod - def matches(cls, path: str) -> bool: - return False - - @classmethod - def get_tracked_files(cls, path, destination=None): - return [] - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - destination.touch() - - self.test_artifact_driver = TestArtifactDriver - - def tearDown(self): - self.exp.stop() - self.monkeypatch.undo() - - @pytest.mark.xfail(reason="Artifact methods disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - def test_fetch_hash(self): - fetched_hash = self.exp[self.path_str].fetch_hash() - self.assertEqual(self.artifact_hash, fetched_hash) - - @pytest.mark.xfail(reason="Artifact methods disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - def test_fetch_files_list(self): - fetched_hash = self.exp[self.path_str].fetch_files_list() - 
self.assertEqual(self.artifact_files, fetched_hash) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_download(self): - self.monkeypatch.setattr( - ArtifactDriversMap, - "match_type", - Mock(return_value=self.test_artifact_driver), - ) - - with tempfile.TemporaryDirectory() as temporary: - self.exp[self.path_str].download(temporary) - temporary_path = pathlib.Path(temporary) - - self.assertTrue((temporary_path / "fname.txt").exists()) - self.assertTrue((temporary_path / "subdir" / "other.mp3").exists()) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_download_unknown_type(self): - self.monkeypatch.setattr( - ArtifactDriversMap, - "match_type", - Mock(side_effect=NeptuneUnhandledArtifactTypeException("test")), - ) - - with tempfile.TemporaryDirectory() as temporary: - with self.assertRaises(NeptuneUnhandledArtifactTypeException): - self.exp[self.path_str].download(temporary) - - temporary_path = pathlib.Path(temporary) - contents = list(temporary_path.iterdir()) - self.assertListEqual(contents, []) - - def test_track_files_to_artifact(self): - source_location = str(uuid.uuid4()) - destination = str(uuid.uuid4()) - source_location2 = str(uuid.uuid4()) - destination2 = str(uuid.uuid4()) - - var = Artifact(self.exp, self.path) - var.track_files(path=source_location, destination=destination, wait=self.wait) - var.track_files(path=source_location2, destination=destination2, wait=self.wait) - - self.op_processor.enqueue_operation.assert_has_calls( - [ - call( - TrackFilesToArtifact( - self.path, - self.exp._project_id, - [(source_location, destination)], - ), - wait=self.wait, - ), - call( - TrackFilesToArtifact( - self.path, - self.exp._project_id, - [(source_location2, destination2)], - ), - wait=self.wait, - ), - ] - ) diff --git a/tests/unit/neptune/new/attributes/atoms/test_artifact_hash.py b/tests/unit/neptune/new/attributes/atoms/test_artifact_hash.py deleted file mode 100644 index 0328d7236..000000000 --- a/tests/unit/neptune/new/attributes/atoms/test_artifact_hash.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from mock import MagicMock
-
-from neptune.attributes.atoms.artifact import Artifact
-from neptune.internal.operation import AssignArtifact
-from neptune.types.atoms.artifact import Artifact as ArtifactVal
-from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase
-
-
-class TestArtifactHash(TestAttributeBase):
-    def test_assign_type_error(self):
-        values = ["foo", 10, None]
-        for value in values:
-            with self.assertRaises(Exception):
-                Artifact(MagicMock(), MagicMock()).assign(value)
-
-    def test_fetch(self):
-        with self._exp() as exp:
-            var = Artifact(exp, self._random_path())
-            var._enqueue_operation(
-                AssignArtifact(
-                    var._path,
-                    ArtifactVal("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855").hash,
-                ),
-                wait=False,
-            )
-            self.assertEqual(
-                ArtifactVal("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"),
-                var.fetch(),
-            )
-
-    def test_fetch_hash(self):
-        with self._exp() as exp:
-            var = Artifact(exp, self._random_path())
-            var._enqueue_operation(
-                AssignArtifact(
-                    var._path,
-                    ArtifactVal("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855").hash,
-                ),
-                wait=False,
-            )
-            self.assertEqual(
-                "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
-                var.fetch_hash(),
-            )
diff --git a/tests/unit/neptune/new/attributes/atoms/test_datetime.py b/tests/unit/neptune/new/attributes/atoms/test_datetime.py
deleted file mode 100644
index 6c4d8eeba..000000000
--- a/tests/unit/neptune/new/attributes/atoms/test_datetime.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#
-# Copyright (c) 2020, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# - -from datetime import datetime - -from mock import ( - MagicMock, - patch, -) - -from neptune.attributes.atoms.datetime import ( - Datetime, - DatetimeVal, -) -from neptune.internal.operation import AssignDatetime -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestDatetime(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - now = datetime.now() - value_and_expected = [ - (now, now.replace(microsecond=1000 * int(now.microsecond / 1000))), - ( - DatetimeVal(now), - now.replace(microsecond=1000 * int(now.microsecond / 1000)), - ), - ] - - for value, expected in value_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - path, wait = ( - self._random_path(), - self._random_wait(), - ) - with self._exp() as exp: - var = Datetime(exp, path) - var.assign(value, wait=wait) - processor.enqueue_operation.assert_called_with(AssignDatetime(path, expected), wait=wait) - - def test_assign_type_error(self): - values = [55, None] - for value in values: - with self.assertRaises(TypeError): - Datetime(MagicMock(), MagicMock()).assign(value) - - def test_get(self): - with self._exp() as exp: - var = Datetime(exp, self._random_path()) - now = datetime.now() - now = now.replace(microsecond=int(now.microsecond / 1000) * 1000) - var.assign(now) - self.assertEqual(now, var.fetch()) diff --git a/tests/unit/neptune/new/attributes/atoms/test_file.py b/tests/unit/neptune/new/attributes/atoms/test_file.py deleted file mode 100644 index d0684580c..000000000 --- a/tests/unit/neptune/new/attributes/atoms/test_file.py +++ /dev/null @@ -1,212 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import unittest -from io import ( - BytesIO, - StringIO, -) -from pathlib import Path -from unittest.mock import PropertyMock - -import pytest -from mock import ( - MagicMock, - patch, -) - -from neptune.attributes.atoms.file import ( - File, - FileVal, -) -from neptune.attributes.file_set import ( - FileSet, - FileSetVal, -) -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - UploadFile, - UploadFileSet, -) -from neptune.internal.types.file_types import FileType -from neptune.internal.utils.utils import IS_WINDOWS -from tests.e2e.utils import tmp_context -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestFile(TestAttributeBase): - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - def get_tmp_uploaded_file_name(tmp_upload_dir): - """Get tmp file to uploaded from `upload_path` - - here's assumption that we upload only one file per one path in test""" - uploaded_files = os.listdir(tmp_upload_dir) - assert len(uploaded_files) == 1 - return uploaded_files[0] - - a_text = "Some text stream" - a_binary = b"Some binary stream" - value_and_operation_factory = [ - ( - FileVal("other/../other/file.txt"), - lambda attribute_path, _: UploadFile( - attribute_path, ext="txt", file_path=os.getcwd() + "/other/file.txt" - ), - ), - ( - FileVal.from_stream(StringIO(a_text)), - lambda attribute_path, tmp_uploaded_file: UploadFile( - attribute_path, ext="txt", tmp_file_name=tmp_uploaded_file - ), - ), - ( - FileVal.from_stream(BytesIO(a_binary)), - lambda attribute_path, tmp_uploaded_file: UploadFile( - attribute_path, ext="bin", tmp_file_name=tmp_uploaded_file - ), - ), - ] - - for value, operation_factory in value_and_operation_factory: - with tmp_context() as tmp_upload_dir: - processor = MagicMock() - processor.operation_storage = PropertyMock(upload_path=Path(tmp_upload_dir)) - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = File(exp, path) - var.assign(value, wait=wait) - - if value.file_type is not FileType.LOCAL_FILE: - tmp_uploaded_file = get_tmp_uploaded_file_name(tmp_upload_dir) - self.assertTrue(os.path.exists(tmp_uploaded_file)) - else: - tmp_uploaded_file = None - - processor.enqueue_operation.assert_called_with( - operation_factory(path, tmp_uploaded_file), wait=wait - ) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_assign_type_error(self): - values = [55, None, []] - for value in values: - with self.assertRaises(TypeError): - File(MagicMock(), MagicMock()).assign(value) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_save(self, get_operation_processor): - value_and_expected = [("some/path", os.getcwd() + "/some/path")] - - for value, expected in value_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), 
- self._random_wait(), - ) - var = File(exp, path) - var.upload(value, wait=wait) - processor.enqueue_operation.assert_called_with( - UploadFile(path=path, ext="", file_path=expected), wait=wait - ) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_save_files(self, get_operation_processor): - value_and_expected = [("some/path/*", [os.getcwd() + "/some/path/*"])] - - for value, expected in value_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FileSet(exp, path) - var.upload_files(value, wait=wait) - processor.enqueue_operation.assert_called_with(UploadFileSet(path, expected, False), wait=wait) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_save_type_error(self): - values = [55, None, [], FileVal] - for value in values: - with self.assertRaises(TypeError): - File(MagicMock(), MagicMock()).upload(value) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_save__files_type_error(self): - values = [55, None, [55], FileSetVal] - for value in values: - with self.assertRaises(TypeError): - FileSet(MagicMock(), MagicMock()).upload_files(value) - - @pytest.mark.xfail( - reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException - ) - def test_fetch_extension(self): - value_and_expected_ext = [ - (FileVal("some/file.txt"), "txt"), - (FileVal("some/file"), ""), - (FileVal.from_content("Some text stream"), "txt"), - (FileVal.from_content(b"Some binary stream"), "bin"), - (FileVal.from_content(b"Some binary stream", extension="png"), "png"), - ] - - for value, expected_ext in value_and_expected_ext: - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = File(exp, path) - var.assign(value, wait=wait) - self.assertEqual(expected_ext, var.fetch_extension()) - - def test_clean_files_on_close(self): - with self._exp() as run: - data_path = run._op_processor.data_path - - assert os.path.exists(data_path) - - run.stop() - - assert not os.path.exists(data_path) # exec folder diff --git a/tests/unit/neptune/new/attributes/atoms/test_float.py b/tests/unit/neptune/new/attributes/atoms/test_float.py deleted file mode 100644 index be0338502..000000000 --- a/tests/unit/neptune/new/attributes/atoms/test_float.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pytest -from mock import ( - MagicMock, - patch, -) - -from neptune import init_run -from neptune.attributes.atoms.float import ( - Float, - FloatVal, -) -from neptune.exceptions import MetadataInconsistency -from neptune.internal.operation import AssignFloat -from neptune.internal.warnings import NeptuneUnsupportedValue -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestFloat(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - value_and_expected = [ - (13, 13), - (15.3, 15.3), - (FloatVal(17), 17), - (FloatVal(17.5), 17.5), - ] - - for value, expected in value_and_expected: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - with self._exp() as run: - var = Float(run, path) - var.assign(value, wait=wait) - processor.enqueue_operation.assert_called_with(AssignFloat(path, expected), wait=wait) - - def test_assign_type_error(self): - values = ["string", None] - for value in values: - with self.assertRaises(Exception): - Float(MagicMock(), MagicMock()).assign(value) - - def test_get(self): - with self._exp() as run: - var = Float(run, self._random_path()) - var.assign(5) - self.assertEqual(5, var.fetch()) - - def test_float_warnings(self): - run = init_run(mode="debug") - with pytest.warns(NeptuneUnsupportedValue): - run["infinity"] = float("inf") - run["neg-infinity"] = float("-inf") - run["nan"] = float("nan") - - with pytest.raises(MetadataInconsistency): - run["infinity"].fetch() - - with pytest.raises(MetadataInconsistency): - run["neg-infinity"].fetch() - - with pytest.raises(MetadataInconsistency): - run["nan"].fetch() - - run.stop() diff --git a/tests/unit/neptune/new/attributes/atoms/test_string.py b/tests/unit/neptune/new/attributes/atoms/test_string.py deleted file mode 100644 index 89a9855cb..000000000 --- a/tests/unit/neptune/new/attributes/atoms/test_string.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from mock import ( - MagicMock, - patch, -) - -from neptune.attributes.atoms.string import ( - String, - StringVal, -) -from neptune.internal.operation import AssignString -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestString(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - value_and_expected = [ - ("qwertyu", "qwertyu"), - (StringVal("Some string"), "Some string"), - ] - - for value, expected in value_and_expected: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - with self._exp() as exp: - var = String(exp, path) - var.assign(value, wait=wait) - processor.enqueue_operation.assert_called_with(AssignString(path, expected), wait=wait) - - def test_get(self): - with self._exp() as exp: - var = String(exp, self._random_path()) - var.assign("adfh") - self.assertEqual("adfh", var.fetch()) diff --git a/tests/unit/neptune/new/attributes/series/__init__.py b/tests/unit/neptune/new/attributes/series/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/attributes/series/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/attributes/series/test_file_series.py b/tests/unit/neptune/new/attributes/series/test_file_series.py deleted file mode 100644 index e257804df..000000000 --- a/tests/unit/neptune/new/attributes/series/test_file_series.py +++ /dev/null @@ -1,252 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -from importlib.util import find_spec -from unittest import mock - -import numpy -import pytest -from mock import ( - MagicMock, - call, - patch, -) - -from neptune.attributes.series.file_series import FileSeries -from neptune.exceptions import OperationNotSupported -from neptune.internal.operation import ( - ClearImageLog, - ImageValue, - LogImages, -) -from neptune.internal.utils import base64_encode -from neptune.types import File -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase -from tests.unit.neptune.new.utils.file_helpers import create_file - - -@patch("time.time", new=TestAttributeBase._now) -class TestFileSeries(TestAttributeBase): - def test_assign_type_error(self): - values = [[5.0], ["text"], 55, "string", None] - for value in values: - with self.assertRaises(Exception): - FileSeries(MagicMock(), MagicMock()).assign(value) - - def test_log_type_error(self): - values = [[5.0], [[]], 55, None] - for value in values: - with self.assertRaises(TypeError): - FileSeries(MagicMock(), MagicMock()).log(value) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log_content(self, get_operation_processor): - # given - wait = self._random_wait() - path = self._random_path() - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.as_image(numpy.random.rand(10, 10) * 255) - - # when - attr.log( - file, - step=3, - timestamp=self._now(), - wait=wait, - name="nazwa", - description="opis", - ) - - # then - processor.enqueue_operation.assert_called_with( - LogImages( - path, - [ - LogImages.ValueType( - ImageValue(base64_encode(file.content), "nazwa", "opis"), - 3, - self._now(), - ) - ], - ), - wait=wait, - ) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign_content(self, get_operation_processor): - # given - wait = self._random_wait() - path = self._random_path() - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.as_image(numpy.random.rand(10, 10) * 255) - - # when - attr.assign([file], wait=wait) - - # then - processor.enqueue_operation.assert_has_calls( - [ - call(ClearImageLog(path), wait=False), - call( - LogImages( - path, - [ - LogImages.ValueType( - ImageValue(base64_encode(file.content), None, None), - None, - self._now(), - ) - ], - ), - wait=wait, - ), - ] - ) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log_path(self, get_operation_processor): - # given - wait = self._random_wait() - path = self._random_path() - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.as_image(numpy.random.rand(10, 10) * 255) - stream = File.from_stream(io.BytesIO(file.content)) - with create_file(file.content, binary_mode=True) as tmp_filename: - saved_file = File(tmp_filename) - - # when - attr.log( - file, - step=3, - timestamp=self._now(), - wait=wait, - description="something", - ) - attr.log( - [stream, saved_file], - timestamp=self._now(), - wait=wait, - description="something", - ) - - # then - def 
generate_expected_call(wait, step): - log_operation = LogImages( - path=path, - values=[ - LogImages.ValueType( - value=ImageValue(base64_encode(file.content), None, "something"), - step=step, - ts=self._now(), - ) - ], - ) - return call( - log_operation, - wait=wait, - ) - - processor.enqueue_operation.assert_has_calls( - [ - generate_expected_call(wait, step=3), - generate_expected_call(wait, step=None), - generate_expected_call(wait, step=None), - ] - ) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - def test_log_raise_not_image(self): - # given - path = self._random_path() - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.from_content("some text") - stream = File.from_stream(io.BytesIO(file.content)) - with create_file(file.content, binary_mode=True) as tmp_filename: - saved_file = File(tmp_filename) - - # when - with self.assertRaises(OperationNotSupported): - attr.log(file) - with self.assertRaises(OperationNotSupported): - attr.log(saved_file) - with self.assertRaises(OperationNotSupported): - attr.log(stream) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - def test_assign_raise_not_image(self): - # given - path = self._random_path() - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.from_content("some text") - stream = File.from_stream(io.BytesIO(file.content)) - with create_file(file.content, binary_mode=True) as tmp_filename: - saved_file = File(tmp_filename) - - # when - with self.assertRaises(OperationNotSupported): - attr.assign([file]) - with self.assertRaises(OperationNotSupported): - attr.assign([saved_file]) - with self.assertRaises(OperationNotSupported): - attr.assign([stream]) - - @pytest.mark.skipif(condition=find_spec("PIL") is None, reason="PIL not installed") - @mock.patch("neptune.internal.utils.limits._LOGGED_IMAGE_SIZE_LIMIT_MB", (10**-3)) - def test_image_limit(self): - """Test if we prohibit logging images greater than mocked 1KB limit size""" - # given - path = self._random_path() - - with self._exp() as exp: - attr = FileSeries(exp, path) - - file = File.as_image(numpy.random.rand(100, 100) * 255) - with create_file(file.content, binary_mode=True) as tmp_filename: - saved_file = File(tmp_filename) - - # when - with pytest.warns( - expected_warning=UserWarning, match=".* Neptune supports logging images smaller than .*" - ): - attr.assign([file]) - with pytest.warns( - expected_warning=UserWarning, match=".* Neptune supports logging images smaller than .*" - ): - attr.assign([saved_file]) diff --git a/tests/unit/neptune/new/attributes/series/test_float_series.py b/tests/unit/neptune/new/attributes/series/test_float_series.py deleted file mode 100644 index 7633a23a0..000000000 --- a/tests/unit/neptune/new/attributes/series/test_float_series.py +++ /dev/null @@ -1,87 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import pytest
-from mock import (
-    MagicMock,
-    patch,
-)
-
-from neptune import init_run
-from neptune.attributes.series.float_series import FloatSeries
-from neptune.exceptions import NeptuneUnsupportedFunctionalityException
-from neptune.internal.warnings import NeptuneUnsupportedValue
-from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase
-
-
-@patch("time.time", new=TestAttributeBase._now)
-class TestFloatSeries(TestAttributeBase):
-    def test_assign_type_error(self):
-        values = [["text"], 55, "string", None]
-        for value in values:
-            with self.assertRaises(Exception):
-                FloatSeries(MagicMock(), MagicMock()).assign(value)
-
-    def test_log_type_error(self):
-        values = [["text"], [[]], [5, ""], "string", None]
-        for value in values:
-            with self.assertRaises(Exception):
-                FloatSeries(MagicMock(), MagicMock()).log(value)
-
-    @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_get(self):
-        with self._exp() as exp:
-            var = FloatSeries(exp, self._random_path())
-            var.log(5)
-            var.log(34)
-            self.assertEqual(34, var.fetch_last())
-
-    @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_log(self):
-        with self._exp() as exp:
-            var = FloatSeries(exp, self._random_path())
-            var.log([val for val in range(0, 5000)])
-            self.assertEqual(4999, var.fetch_last())
-            values = list(var.fetch_values()["value"].array)
-            expected = list(range(0, 5000))
-            self.assertEqual(len(set(expected)), len(set(values)))
-
-    @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_float_warnings(self):
-        run = init_run(mode="debug")
-
-        with pytest.warns(NeptuneUnsupportedValue):
-            run["train"].append({"supported_1": 1, "supported_2": 2})
-            run["train"].append({"unsupported": float("nan"), "supported_3": float(3)})
-            run["train"].append({"neg_infinity": float("-inf")})
-            run["train"].append({"infinity": float("inf")})
-
-        assert run["train/supported_1"].fetch_last() == 1
-        assert run["train/supported_2"].fetch_last() == 2
-        assert run["train/supported_3"].fetch_last() == 3
-
-        run.stop()
-
-    def test_multiple_values_to_same_namespace(self):
-        run = init_run(mode="debug")
-
-        run["multiple"].extend([1.5, 2.3, str(float("nan")), 4.7])
-        result = run["multiple"].fetch_values()
-
-        assert result["value"][0] == 1.5
-        assert result["value"][1] == 2.3
-        assert result["value"][2] == 4.7
-
-        run.stop()
diff --git a/tests/unit/neptune/new/attributes/series/test_series.py b/tests/unit/neptune/new/attributes/series/test_series.py
deleted file mode 100644
index ae54edf02..000000000
--- a/tests/unit/neptune/new/attributes/series/test_series.py
+++ /dev/null
@@ -1,196 +0,0 @@
-#
-# Copyright (c) 2020, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -from mock import ( - MagicMock, - call, - patch, -) - -from neptune.attributes.series.float_series import ( - FloatSeries, - FloatSeriesVal, -) -from neptune.attributes.series.string_series import ( - StringSeries, - StringSeriesVal, -) -from neptune.internal.operation import ( - ClearFloatLog, - ClearStringLog, - ConfigFloatSeries, - LogFloats, -) -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -@patch("time.time", new=TestAttributeBase._now) -class TestSeries(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - value = FloatSeriesVal([17, 3.6], min=0, max=100, unit="%") - expected = [ - LogFloats.ValueType(17, None, self._now()), - LogFloats.ValueType(3.6, None, self._now()), - ] - - processor = MagicMock() - get_operation_processor.return_value = processor - path, wait = ( - self._random_path(), - self._random_wait(), - ) - with self._exp() as exp: - var = FloatSeries(exp, path) - var.assign(value, wait=wait) - processor.enqueue_operation.assert_has_calls( - [ - call(ConfigFloatSeries(path, min=0, max=100, unit="%"), wait=False), - call(ClearFloatLog(path), wait=False), - call(LogFloats(path, expected), wait=wait), - ] - ) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign_empty(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSeries(exp, path) - var.assign(StringSeriesVal([]), wait=wait) - processor.enqueue_operation.assert_called_with(ClearStringLog(path), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log(self, get_operation_processor): - value_and_expected = [ - (13, [LogFloats.ValueType(13, None, self._now())]), - (15.3, [LogFloats.ValueType(15.3, None, self._now())]), - ( - [1, 9, 7], - [ - LogFloats.ValueType(1, None, self._now()), - LogFloats.ValueType(9, None, self._now()), - LogFloats.ValueType(7, None, self._now()), - ], - ), - ( - (1, 9, 7), - [ - LogFloats.ValueType(1, None, self._now()), - LogFloats.ValueType(9, None, self._now()), - LogFloats.ValueType(7, None, self._now()), - ], - ), - ( - {1, 9, 7}, - [ - LogFloats.ValueType(1, None, self._now()), - LogFloats.ValueType(9, None, self._now()), - LogFloats.ValueType(7, None, self._now()), - ], - ), - ] - - for value, expected in value_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FloatSeries(exp, path) - var.log(value, wait=wait) - processor.enqueue_operation.assert_called_with(LogFloats(path, expected), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log_with_step(self, get_operation_processor): - value_step_and_expected = [ - (13, 5.3, LogFloats.ValueType(13, 5.3, self._now())), - (15.3, 10, LogFloats.ValueType(15.3, 10, self._now())), - ([13], 5.3, LogFloats.ValueType(13, 5.3, self._now())), - ((13,), 5.3, LogFloats.ValueType(13, 5.3, self._now())), - ({13}, 5.3, LogFloats.ValueType(13, 5.3, self._now())), - ] - - for value, step, expected in value_step_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = 
FloatSeries(exp, path) - var.log(value, step=step, wait=wait) - processor.enqueue_operation.assert_called_with(LogFloats(path, [expected]), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log_with_timestamp(self, get_operation_processor): - value_step_and_expected = [ - (13, 5.3, LogFloats.ValueType(13, None, 5.3)), - (15.3, 10, LogFloats.ValueType(15.3, None, 10)), - ] - - for value, ts, expected in value_step_and_expected: - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FloatSeries(exp, path) - var.log(value, timestamp=ts, wait=wait) - processor.enqueue_operation.assert_called_with(LogFloats(path, [expected]), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_log_value_errors(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - attr = FloatSeries(exp, self._random_path()) - - with self.assertRaises(ValueError): - attr.log(["str", 5]) - with self.assertRaises(ValueError): - attr.log([5, 10], step=10) - with self.assertRaises(TypeError): - attr.log(5, step="str") - with self.assertRaises(TypeError): - attr.log(5, timestamp="str") - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_clear(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FloatSeries(exp, path) - var.clear(wait=wait) - processor.enqueue_operation.assert_called_with(ClearFloatLog(path), wait=wait) diff --git a/tests/unit/neptune/new/attributes/series/test_string_series.py b/tests/unit/neptune/new/attributes/series/test_string_series.py deleted file mode 100644 index 015d5eaea..000000000 --- a/tests/unit/neptune/new/attributes/series/test_string_series.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pytest -from mock import ( - MagicMock, - patch, -) - -from neptune.attributes.series.string_series import StringSeries -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -@patch("time.time", new=TestAttributeBase._now) -class TestStringSeries(TestAttributeBase): - def test_assign_type_error(self): - values = [55, "string", None] - for value in values: - with self.assertRaises(Exception): - StringSeries(MagicMock(), MagicMock()).assign(value) - - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - def test_get(self): - with self._exp() as exp: - var = StringSeries(exp, self._random_path()) - var.log("asdfhadh") - var.log("hej!") - self.assertEqual("hej!", var.fetch_last()) - - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - def test_log(self): - with self._exp() as exp: - var = StringSeries(exp, self._random_path()) - var.log([str(val) for val in range(0, 5000)]) - self.assertEqual("4999", var.fetch_last()) - values = list(var.fetch_values()["value"].array) - expected = list(range(0, 5000)) - self.assertEqual(len(set(expected)), len(set(values))) diff --git a/tests/unit/neptune/new/attributes/sets/__init__.py b/tests/unit/neptune/new/attributes/sets/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/attributes/sets/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/attributes/sets/test_file_set.py b/tests/unit/neptune/new/attributes/sets/test_file_set.py deleted file mode 100644 index ca8fcff20..000000000 --- a/tests/unit/neptune/new/attributes/sets/test_file_set.py +++ /dev/null @@ -1,95 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os - -import pytest -from mock import ( - MagicMock, - patch, -) - -from neptune.attributes.file_set import FileSet -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - DeleteFiles, - UploadFileSet, -) -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -@pytest.mark.xfail(reason="File functionality disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) -class TestFileSet(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - globs = ["path1", "dir/", "glob/*.py"] - expected = [os.path.abspath(glob) for glob in globs] - - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FileSet(exp, path) - var.assign(globs, wait=wait) - processor.enqueue_operation.assert_called_with(UploadFileSet(path, expected, reset=True), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_upload_files(self, get_operation_processor): - globs = ["path1", "dir/", "glob/*.py"] - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FileSet(exp, path) - var.upload_files(globs, wait=wait) - processor.enqueue_operation.assert_called_with( - UploadFileSet(path, [os.path.abspath(glob) for glob in globs], reset=False), - wait=wait, - ) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_delete_files(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = FileSet(exp, path) - var.delete_files(["path1", "dir/"], wait=wait) - processor.enqueue_operation.assert_called_with(DeleteFiles(path, {"path1", "dir/"}), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_list_fileset_files(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path = self._random_path() - var = FileSet(exp, path) - file_entries = var.list_fileset_files() - - assert isinstance(file_entries, list) - assert len(file_entries) == 1 diff --git a/tests/unit/neptune/new/attributes/sets/test_string_set.py b/tests/unit/neptune/new/attributes/sets/test_string_set.py deleted file mode 100644 index 761fd1d40..000000000 --- a/tests/unit/neptune/new/attributes/sets/test_string_set.py +++ /dev/null @@ -1,156 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pytest -from mock import ( - MagicMock, - call, - patch, -) - -from neptune.attributes.sets.string_set import ( - StringSet, - StringSetVal, -) -from neptune.exceptions import NeptuneUnsupportedFunctionalityException -from neptune.internal.operation import ( - AddStrings, - ClearStringSet, - RemoveStrings, -) -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - - -class TestStringSet(TestAttributeBase): - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign(self, get_operation_processor): - value = StringSetVal(["ert", "qwe"]) - expected = {"ert", "qwe"} - - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.assign(value, wait=wait) - processor.enqueue_operation.assert_has_calls( - [call(ClearStringSet(path), wait=False), call(AddStrings(path, expected), wait=wait)] - ) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_assign_empty(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.assign(StringSetVal([]), wait=wait) - processor.enqueue_operation.assert_called_with(ClearStringSet(path), wait=wait) - - def test_assign_type_error(self): - values = [{5.0}, {"text"}, {}, [5.0], ["text"], [], 55, "string", None] - for value in values: - with self.assertRaises(TypeError): - StringSet(MagicMock(), MagicMock()).assign(value) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_add(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.add(["a", "bb", "ccc"], wait=wait) - processor.enqueue_operation.assert_called_with(AddStrings(path, {"a", "bb", "ccc"}), wait=wait) - - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_add_single_value(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.add("ccc", wait=wait) - processor.enqueue_operation.assert_called_with(AddStrings(path, {"ccc"}), wait=wait) - - @pytest.mark.xfail(reason="StringSet remove disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_remove(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.remove(["a", "bb", "ccc"], wait=wait) - processor.enqueue_operation.assert_called_with(RemoveStrings(path, {"a", "bb", "ccc"}), wait=wait) - - @pytest.mark.xfail(reason="StringSet remove disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_remove_single_value(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - 
self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.remove("bb", wait=wait) - processor.enqueue_operation.assert_called_with(RemoveStrings(path, {"bb"}), wait=wait) - - @pytest.mark.xfail(reason="StringSet clear disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - @patch("neptune.objects.neptune_object.get_operation_processor") - def test_clear(self, get_operation_processor): - processor = MagicMock() - get_operation_processor.return_value = processor - - with self._exp() as exp: - path, wait = ( - self._random_path(), - self._random_wait(), - ) - var = StringSet(exp, path) - var.clear(wait=wait) - processor.enqueue_operation.assert_called_with(ClearStringSet(path), wait=wait) - - @pytest.mark.xfail(reason="StringSet remove disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) - def test_get(self): - with self._exp() as exp: - var = StringSet(exp, self._random_path()) - var.add(["abc", "xyz"]) - var.remove(["abc"]) - var.add(["hej", "lol"]) - self.assertEqual({"xyz", "hej", "lol"}, var.fetch()) diff --git a/tests/unit/neptune/new/attributes/test_attribute_base.py b/tests/unit/neptune/new/attributes/test_attribute_base.py deleted file mode 100644 index 2e8574b35..000000000 --- a/tests/unit/neptune/new/attributes/test_attribute_base.py +++ /dev/null @@ -1,50 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import random -import time -import unittest -import uuid -from contextlib import contextmanager - -from neptune import Run - -_now = time.time() - - -class TestAttributeBase(unittest.TestCase): - @staticmethod - @contextmanager - def _exp(): - with Run( - mode="debug", - capture_stderr=False, - capture_traceback=False, - capture_stdout=False, - capture_hardware_metrics=False, - ) as exp: - yield exp - - @staticmethod - def _random_path(): - return ["some", "random", "path", str(uuid.uuid4())] - - @staticmethod - def _random_wait(): - return bool(random.getrandbits(1)) - - @staticmethod - def _now(): - return _now diff --git a/tests/unit/neptune/new/attributes/test_attribute_utils.py b/tests/unit/neptune/new/attributes/test_attribute_utils.py deleted file mode 100644 index 5839101e2..000000000 --- a/tests/unit/neptune/new/attributes/test_attribute_utils.py +++ /dev/null @@ -1,32 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import unittest -from unittest.mock import MagicMock - -from neptune.api.models import FieldType -from neptune.attributes import create_attribute_from_type -from neptune.attributes.attribute import Attribute - - -class TestAttributeUtils(unittest.TestCase): - def test_attribute_type_to_atom(self): - # Expect that all FieldTypes are reflected in `attribute_type_to_atom`... - # ... and that each attribute it produces is an instance of `Attribute` - self.assertTrue( - all( - isinstance(create_attribute_from_type(attr_type, MagicMock(), ""), Attribute) for attr_type in FieldType - ) - ) diff --git a/tests/unit/neptune/new/cli/__init__.py b/tests/unit/neptune/new/cli/__init__.py deleted file mode 100644 index b5e585d90..000000000 --- a/tests/unit/neptune/new/cli/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/cli/test_clear.py b/tests/unit/neptune/new/cli/test_clear.py deleted file mode 100644 index c772892f5..000000000 --- a/tests/unit/neptune/new/cli/test_clear.py +++ /dev/null @@ -1,245 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -import os -from unittest.mock import MagicMock - -import pytest - -from neptune.cli.clear import ClearRunner -from neptune.cli.utils import get_qualified_name -from neptune.constants import ( - ASYNC_DIRECTORY, - OFFLINE_DIRECTORY, - SYNC_DIRECTORY, -) -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import Operation -from tests.unit.neptune.new.cli.utils import ( - generate_get_metadata_container, - prepare_v1_container, - prepare_v2_container, -) - -AVAILABLE_CONTAINERS = [ContainerType.RUN, ContainerType.MODEL_VERSION, ContainerType.MODEL, ContainerType.PROJECT] - - -@pytest.fixture(name="backend") -def backend_fixture(): - return MagicMock() - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_clean_v2_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=1, pid=1234, key="a1b2c3" - ) - synced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=3, pid=1235, key="d4e5f6" - ) - offline_containers = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=None, pid=1236, key="g7h8j9" - ) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - assert os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(unsynced_container.id)}__1234__a1b2c3" - ) - assert os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(synced_container.id)}__1235__d4e5f6" - ) - assert os.path.exists( - tmp_path / OFFLINE_DIRECTORY / f"{container_type.create_dir_name(offline_containers.id)}__1236__g7h8j9" - ) - - # when - ClearRunner.clear(backend=backend, path=tmp_path, force=True) - - # then - assert not os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(unsynced_container.id)}__1234__a1b2c3" - ) - assert not os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(synced_container.id)}__1235__d4e5f6" - ) - assert not os.path.exists( - tmp_path / OFFLINE_DIRECTORY / f"{container_type.create_dir_name(offline_containers.id)}__1236__g7h8j9" - ) - - # and - offline_container_prefix = container_type.create_dir_name(offline_containers.id) - unsynced_container_prefix = container_type.create_dir_name(unsynced_container.id) - synced_container_prefix = container_type.create_dir_name(synced_container.id) - - # and - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - f"Deleted: {tmp_path / ASYNC_DIRECTORY / f'{synced_container_prefix}__1235__d4e5f6'}", - "", - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)}", - "", - "Unsynchronized offline objects:", - f"- offline/{offline_containers.id}", - f"Deleted: {tmp_path / OFFLINE_DIRECTORY / f'{offline_container_prefix}__1236__g7h8j9'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / f'{unsynced_container_prefix}__1234__a1b2c3'}", - ] - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_clean_v2_deleted_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=1, pid=1234, key="a1b2c3" - ) - synced_container 
= prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=3, pid=1235, key="d4e5f6" - ) - - # and - empty_get_container_impl = generate_get_metadata_container(registered_containers=[]) - - # and - mocker.patch.object(backend, "get_metadata_container", empty_get_container_impl) - mocker.patch.object(Operation, "from_dict") - - assert os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(unsynced_container.id)}__1234__a1b2c3" - ) - assert os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(synced_container.id)}__1235__d4e5f6" - ) - # when - ClearRunner.clear(backend=backend, path=tmp_path, force=True) - - # then - assert not os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(unsynced_container.id)}__1234__a1b2c3" - ) - assert not os.path.exists( - tmp_path / ASYNC_DIRECTORY / f"{container_type.create_dir_name(synced_container.id)}__1235__d4e5f6" - ) - - # and - unsynced_container_prefix = container_type.create_dir_name(unsynced_container.id) - synced_container_prefix = container_type.create_dir_name(synced_container.id) - - # and - captured = capsys.readouterr() - assert set(captured.out.splitlines()) == { - f"Can't fetch ContainerType.{container_type.name} {synced_container.id}. Skipping.", - f"Can't fetch ContainerType.{container_type.name} {unsynced_container.id}. Skipping.", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / f'{synced_container_prefix}__1235__d4e5f6'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / f'{unsynced_container_prefix}__1234__a1b2c3'}", - } - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_clean_v1_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=1) - synced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=3) - offline_containers = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=None) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - assert os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)) - assert os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)) - assert os.path.exists(tmp_path / OFFLINE_DIRECTORY / container_type.create_dir_name(offline_containers.id)) - - # when - ClearRunner.clear(backend=backend, path=tmp_path, force=True) - - # then - assert not os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)) - assert not os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)) - assert not os.path.exists(tmp_path / OFFLINE_DIRECTORY / container_type.create_dir_name(offline_containers.id)) - - # and - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id) / 'exec-0'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)}", - "", - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)}", - "", - "Unsynchronized offline objects:", - f"- 
offline/{offline_containers.id}", - f"Deleted: {tmp_path / OFFLINE_DIRECTORY / container_type.create_dir_name(offline_containers.id)}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id) / 'exec-0'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)}", - ] - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_clean_v1_deleted_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=1) - synced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=3) - - # and - empty_get_container_impl = generate_get_metadata_container(registered_containers=[]) - - # and - mocker.patch.object(backend, "get_metadata_container", empty_get_container_impl) - mocker.patch.object(Operation, "from_dict") - - assert os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)) - assert os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)) - - # when - ClearRunner.clear(backend=backend, path=tmp_path, force=True) - - # then - assert not os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)) - assert not os.path.exists(tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)) - - # and - captured = capsys.readouterr() - assert set(captured.out.splitlines()) == { - f"Can't fetch ContainerType.{container_type.name} {synced_container.id}. Skipping.", - f"Can't fetch ContainerType.{container_type.name} {unsynced_container.id}. Skipping.", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id) / 'exec-0'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(synced_container.id)}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id) / 'exec-0'}", - f"Deleted: {tmp_path / ASYNC_DIRECTORY / container_type.create_dir_name(unsynced_container.id)}", - } - - -def test_clean_sync_directory(tmp_path, backend): - # given - sync_directory = tmp_path / SYNC_DIRECTORY - sync_directory.mkdir(parents=True, exist_ok=True) - - assert os.path.exists(sync_directory) - - # when - ClearRunner.clear(backend=backend, path=tmp_path) - - # then - assert not os.path.exists(sync_directory) diff --git a/tests/unit/neptune/new/cli/test_status.py b/tests/unit/neptune/new/cli/test_status.py deleted file mode 100644 index e9d4a152b..000000000 --- a/tests/unit/neptune/new/cli/test_status.py +++ /dev/null @@ -1,199 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
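The clear tests above turn entirely on two on-disk layouts: v2 stores each execution directly under `.neptune/async` (or `offline`) as a single `<type>__<id>__<pid>__<key>` directory, while v1 nested an `exec-0` child under `<type>__<id>`. A rough sketch of the two naming conventions, inferred from the paths asserted in these tests; `dir_name_v1` and `dir_name_v2` are illustrative helpers, not Neptune API.

from pathlib import Path


def dir_name_v2(base: Path, sync_state: str, type_prefix: str, container_id: str, pid: int, key: str) -> Path:
    # v2: one flat directory per execution, keyed by pid and a random suffix.
    return base / sync_state / f"{type_prefix}__{container_id}__{pid}__{key}"


def dir_name_v1(base: Path, sync_state: str, type_prefix: str, container_id: str) -> Path:
    # v1: a container directory with per-execution children such as "exec-0".
    return base / sync_state / f"{type_prefix}__{container_id}" / "exec-0"


assert dir_name_v2(Path(".neptune"), "async", "run", "abc", 1234, "a1b2c3") == Path(
    ".neptune/async/run__abc__1234__a1b2c3"
)
assert dir_name_v1(Path(".neptune"), "async", "run", "abc") == Path(".neptune/async/run__abc/exec-0")

That difference is why the v1 tests expect two "Deleted:" lines per container (the `exec-0` child, then its parent) while the v2 tests expect one.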
-# -from unittest.mock import MagicMock - -import pytest - -from neptune.cli.status import StatusRunner -from neptune.cli.utils import get_qualified_name -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import Operation -from tests.unit.neptune.new.cli.utils import ( - generate_get_metadata_container, - prepare_v1_container, - prepare_v2_container, -) - -AVAILABLE_CONTAINERS = [ContainerType.RUN, ContainerType.MODEL_VERSION, ContainerType.MODEL, ContainerType.PROJECT] - - -@pytest.fixture(name="backend") -def backend_fixture(): - return MagicMock() - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_list_v2_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=1, pid=2501, key="a1b2c3" - ) - synced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=3, pid=2502, key="d4e5f6" - ) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)}", - "", - "Please run with the `neptune sync --help` to see example commands.", - ] - - -def test_list_offline_v2_runs(tmp_path, mocker, capsys, backend): - # given - offline_run = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=None, pid=2501, key="a1b2c3" - ) - - # and - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.err == "" - assert f"Unsynchronized offline objects:\n- offline/{offline_run.id}" in captured.out - - -def test_list_trashed_v2_containers(tmp_path, mocker, capsys, backend): - # given - unsynced_container = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=1, trashed=True, pid=2501, key="a1b2c3" - ) - synced_container = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=3, trashed=True, pid=2502, key="d4e5f6" - ) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)} (Trashed)", - "", - "Please run with the `neptune sync --help` to see example commands.", - ] - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_list_v1_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=1) - synced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=3) - - # and - get_container_impl = 
generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)}", - "", - "Please run with the `neptune sync --help` to see example commands.", - ] - - -def test_list_offline_v1_runs(tmp_path, mocker, capsys, backend): - # given - offline_run = prepare_v1_container( - container_type=ContainerType.RUN, - path=tmp_path, - last_ack_version=None, - ) - - # and - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.err == "" - assert "Unsynchronized offline objects:\n- offline/{}".format(offline_run.id) in captured.out - - -def test_list_trashed_v1_containers(tmp_path, mocker, capsys, backend): - # given - unsynced_container = prepare_v1_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=1, trashed=True - ) - synced_container = prepare_v1_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=3, trashed=True - ) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict") - - # when - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.out.splitlines() == [ - "Unsynchronized objects:", - f"- {get_qualified_name(unsynced_container)} (Trashed)", - "", - "Please run with the `neptune sync --help` to see example commands.", - ] - - -def test_list_runs_when_no_run(tmp_path, capsys, backend): - (tmp_path / "async").mkdir() - # when - with pytest.raises(SystemExit): - StatusRunner.status(backend=backend, path=tmp_path) - - # then - captured = capsys.readouterr() - assert captured.err == "" - assert "There are no Neptune objects" in captured.out diff --git a/tests/unit/neptune/new/cli/test_sync.py b/tests/unit/neptune/new/cli/test_sync.py deleted file mode 100644 index 9d1a62796..000000000 --- a/tests/unit/neptune/new/cli/test_sync.py +++ /dev/null @@ -1,428 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
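A note on what "unsynchronized" means in the status tests above: each queue directory records a last-put offset and a last-acknowledged offset, and an object counts as unsynced while acks lag behind puts. The standalone sketch below classifies a queue directory under those rules, assuming the `last_put_version`/`last_ack_version` files seen in the fixtures; treating a missing ack file as "offline" is a simplification, since the real code keys offline data off the `offline/` directory instead.

import tempfile
from pathlib import Path
from typing import Optional


def read_offset(path: Path) -> Optional[int]:
    # Offset files hold a single integer; a missing file means "never written".
    return int(path.read_text()) if path.exists() else None


def classify(queue_dir: Path) -> str:
    last_put = read_offset(queue_dir / "last_put_version") or 0
    last_ack = read_offset(queue_dir / "last_ack_version")
    if last_ack is None:
        return "offline"
    return "synced" if last_ack >= last_put else "unsynced"


# Mirrors the fixtures: three operations put, one acknowledged -> unsynced.
with tempfile.TemporaryDirectory() as tmp:
    queue = Path(tmp)
    (queue / "last_put_version").write_text("3")
    (queue / "last_ack_version").write_text("1")
    assert classify(queue) == "unsynced"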
-# - -from unittest.mock import MagicMock - -import mock -import pytest - -from neptune.cli.sync import SyncRunner -from neptune.cli.utils import get_qualified_name -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import Operation -from tests.unit.neptune.new.cli.utils import ( - execute_operations, - generate_get_metadata_container, - prepare_v0_run, - prepare_v1_container, - prepare_v2_container, -) - -AVAILABLE_CONTAINERS = [ContainerType.RUN, ContainerType.MODEL_VERSION, ContainerType.MODEL, ContainerType.PROJECT] - - -@pytest.fixture(name="backend") -def backend_fixture(): - backend = MagicMock() - backend.execute_operations.side_effect = execute_operations - return backend - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_sync_all_v2_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v2_container( - container_type=container_type, - path=tmp_path, - last_ack_version=1, - pid=2501, - key="a1b2c3", - ) - synced_container = prepare_v2_container( - container_type=container_type, path=tmp_path, last_ack_version=3, pid=2502, key="d4e5f6" - ) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_all(backend=backend, base_path=tmp_path, project_name="foo") - - # then - captured = capsys.readouterr() - assert captured.err == "" - - # expect output for unsynced run - assert f"Synchronising {get_qualified_name(unsynced_container)}" in captured.out - assert ( - f"Synchronization of {container_type.value} {get_qualified_name(unsynced_container)} completed." 
in captured.out - ) - - # expect NO output for synced run - assert f"Synchronising {get_qualified_name(synced_container)}" not in captured.out - - # and - backend.execute_operations.assert_called_once() - backend.execute_operations.assert_has_calls( - calls=[ - mocker.call( - container_id=unsynced_container.id, - container_type=container_type, - operations=["op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_all_offline_v2_runs(tmp_path, mocker, capsys, backend): - # given - offline_run = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=None, pid=2501, key="a1b2c3" - ) - - # and - get_run_impl = generate_get_metadata_container(registered_containers=(offline_run,)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_run_impl) - mocker.patch( - "neptune.cli.containers.register_offline_container", - lambda backend, project, container_type: offline_run, - ) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_all(backend=backend, base_path=tmp_path, project_name="foo") - - # then - captured = capsys.readouterr() - assert captured.err == "" - assert ( - "Offline container {} registered as {}".format(f"{offline_run.id}", get_qualified_name(offline_run)) - ) in captured.out - - # and - backend.execute_operations.assert_has_calls( - [ - mocker.call( - container_id=offline_run.id, - container_type=ContainerType.RUN, - operations=["op-0", "op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_selected_v2_runs(tmp_path, mocker, capsys, backend): - # given - unsync_exp = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=1, pid=2501, key="a1b2c3" - ) # won't be synced, despite fact it's not synced yet - sync_exp = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=3, pid=2502, key="d4e5f6" - ) # will be synced despite fact that it's up to date - offline_run = prepare_v2_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=None, pid=2503, key="g7h8j9" - ) # will be synced - - # and - get_run_impl = generate_get_metadata_container( - registered_containers=[ - unsync_exp, - sync_exp, - offline_run, - ] - ) - - # and - mocker.patch.object(backend, "get_metadata_container", get_run_impl) - mocker.patch( - "neptune.cli.containers.register_offline_container", - lambda backend, project, container_type: offline_run, - ) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_selected( - backend=backend, - base_path=tmp_path, - project_name="some-name", - object_names=[ - get_qualified_name(sync_exp), - "offline/" + offline_run.id, - ], - ) - - # then - captured = capsys.readouterr() - # expect no errors - assert captured.err == "" - - # expected output for mentioned async exp - assert "Synchronising {}".format(get_qualified_name(sync_exp)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(sync_exp)) in captured.out - - # expected output for offline container - assert (f"Offline container {offline_run.id} registered as {get_qualified_name(offline_run)}") in captured.out - assert "Synchronising {}".format(get_qualified_name(offline_run)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(offline_run)) in captured.out - - # expected NO output for not mentioned async container - assert "Synchronising 
{}".format(get_qualified_name(unsync_exp)) not in captured.out - - # and - backend.execute_operations.assert_has_calls( - [ - mocker.call( - container_id=offline_run.id, - container_type=ContainerType.RUN, - operations=["op-0", "op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -@pytest.mark.parametrize("container_type", AVAILABLE_CONTAINERS) -def test_sync_all_v1_containers(tmp_path, mocker, capsys, backend, container_type): - # given - unsynced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=1) - synced_container = prepare_v1_container(container_type=container_type, path=tmp_path, last_ack_version=3) - - # and - get_container_impl = generate_get_metadata_container(registered_containers=(unsynced_container, synced_container)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_all(backend=backend, base_path=tmp_path, project_name="foo") - - # then - captured = capsys.readouterr() - assert captured.err == "" - - # expect output for unsynced run - assert f"Synchronising {get_qualified_name(unsynced_container)}" in captured.out - assert ( - f"Synchronization of {container_type.value} {get_qualified_name(unsynced_container)} completed." in captured.out - ) - - # expect NO output for synced run - assert f"Synchronising {get_qualified_name(synced_container)}" not in captured.out - - # and - backend.execute_operations.assert_called_once() - backend.execute_operations.assert_has_calls( - calls=[ - mocker.call( - container_id=unsynced_container.id, - container_type=container_type, - operations=["op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_all_offline_v1_runs(tmp_path, mocker, capsys, backend): - # given - offline_run = prepare_v1_container(container_type=ContainerType.RUN, path=tmp_path, last_ack_version=None) - - # and - get_run_impl = generate_get_metadata_container(registered_containers=(offline_run,)) - - # and - mocker.patch.object(backend, "get_metadata_container", get_run_impl) - mocker.patch( - "neptune.cli.containers.register_offline_container", - lambda backend, project, container_type: offline_run, - ) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_all(backend=backend, base_path=tmp_path, project_name="foo") - - # then - captured = capsys.readouterr() - assert captured.err == "" - assert ( - "Offline container {} registered as {}".format(f"{offline_run.id}", get_qualified_name(offline_run)) - ) in captured.out - - # and - backend.execute_operations.assert_has_calls( - [ - mocker.call( - container_id=offline_run.id, - container_type=ContainerType.RUN, - operations=["op-0", "op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_selected_v1_runs(tmp_path, mocker, capsys, backend): - # given - unsync_exp = prepare_v1_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=1 - ) # won't be synced, despite fact it's not synced yet - sync_exp = prepare_v1_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=3 - ) # will be synced despite fact that it's up to date - offline_run = prepare_v1_container( - container_type=ContainerType.RUN, path=tmp_path, last_ack_version=None - ) # will be synced - - # and - get_run_impl = generate_get_metadata_container( - registered_containers=[ - unsync_exp, - sync_exp, - 
offline_run, - ] - ) - - # and - mocker.patch.object(backend, "get_metadata_container", get_run_impl) - mocker.patch( - "neptune.cli.containers.register_offline_container", - lambda backend, project, container_type: offline_run, - ) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_selected( - backend=backend, - base_path=tmp_path, - project_name="some-name", - object_names=[ - get_qualified_name(sync_exp), - "offline/" + offline_run.id, - ], - ) - - # then - captured = capsys.readouterr() - # expect no errors - assert captured.err == "" - - # expected output for mentioned async exp - assert "Synchronising {}".format(get_qualified_name(sync_exp)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(sync_exp)) in captured.out - - # expected output for offline container - assert ( - "Offline container {} registered as {}".format(f"{offline_run.id}", get_qualified_name(offline_run)) - ) in captured.out - assert "Synchronising {}".format(get_qualified_name(offline_run)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(offline_run)) in captured.out - - # expected NO output for not mentioned async container - assert "Synchronising {}".format(get_qualified_name(unsync_exp)) not in captured.out - - # and - backend.execute_operations.assert_has_calls( - [ - mocker.call( - container_id=offline_run.id, - container_type=ContainerType.RUN, - operations=["op-0", "op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_v0_runs(tmp_path, mocker, capsys, backend): - # given - deprecated_unsynced_run = prepare_v0_run(path=tmp_path, last_ack_version=1) - offline_old_run = prepare_v0_run(path=tmp_path, last_ack_version=None) - - # and - get_container_impl = generate_get_metadata_container( - registered_containers=(deprecated_unsynced_run, offline_old_run) - ) - - # and - mocker.patch.object(backend, "get_metadata_container", get_container_impl) - mocker.patch( - "neptune.cli.containers.register_offline_container", - lambda backend, project, container_type: offline_old_run, - ) - mocker.patch.object(Operation, "from_dict", lambda x: x) - - # when - SyncRunner.sync_all(backend=backend, base_path=tmp_path, project_name="foo") - - # then - captured = capsys.readouterr() - assert captured.err == "" - - assert ( - "Offline container {} registered as {}".format(f"{offline_old_run.id}", get_qualified_name(offline_old_run)) - ) in captured.out - - assert "Synchronising {}".format(get_qualified_name(deprecated_unsynced_run)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(deprecated_unsynced_run)) in captured.out - assert "Synchronising {}".format(get_qualified_name(offline_old_run)) in captured.out - assert "Synchronization of run {} completed.".format(get_qualified_name(offline_old_run)) in captured.out - - # and - backend.execute_operations.assert_has_calls( - [ - mocker.call( - container_id=deprecated_unsynced_run.id, - container_type=ContainerType.RUN, - operations=["op-1", "op-2"], - operation_storage=mock.ANY, - ), - mocker.call( - container_id=offline_old_run.id, - container_type=ContainerType.RUN, - operations=["op-0", "op-1", "op-2"], - operation_storage=mock.ANY, - ), - ], - any_order=True, - ) - - -def test_sync_non_existent_offline_containers(tmp_path, capsys, backend): - # when - SyncRunner.sync_selected(backend=backend, base_path=tmp_path, project_name="foo", object_names=["offline/foo__bar"]) - 
SyncRunner.sync_selected( - backend=backend, base_path=tmp_path, project_name="foo", object_names=["offline/model__bar"] - ) - - # then - captured = capsys.readouterr() - assert "Offline container foo__bar not found on disk." in captured.out - assert "Offline container model__bar not found on disk." in captured.out diff --git a/tests/unit/neptune/new/cli/test_utils.py b/tests/unit/neptune/new/cli/test_utils.py deleted file mode 100644 index 134f6a8ff..000000000 --- a/tests/unit/neptune/new/cli/test_utils.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import uuid - -from neptune.cli.utils import detect_async_dir -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.objects.structure_version import StructureVersion - - -def test__split_dir_name(): - # given - random_id = UniqueId(str(uuid.uuid4())) - - assert detect_async_dir(f"{random_id}") == (ContainerType.RUN, random_id, StructureVersion.LEGACY) - assert detect_async_dir(f"run__{random_id}") == ( - ContainerType.RUN, - random_id, - StructureVersion.CHILD_EXECUTION_DIRECTORIES, - ) - assert detect_async_dir(f"model__{random_id}") == ( - ContainerType.MODEL, - random_id, - StructureVersion.CHILD_EXECUTION_DIRECTORIES, - ) - assert detect_async_dir(f"project__{random_id}") == ( - ContainerType.PROJECT, - random_id, - StructureVersion.CHILD_EXECUTION_DIRECTORIES, - ) - assert detect_async_dir(f"model_version__{random_id}") == ( - ContainerType.MODEL_VERSION, - random_id, - StructureVersion.CHILD_EXECUTION_DIRECTORIES, - ) - assert detect_async_dir(f"run__{random_id}__1234__abcdefgh") == ( - ContainerType.RUN, - random_id, - StructureVersion.DIRECT_DIRECTORY, - ) - assert detect_async_dir(f"project__{random_id}__1234__abcdefgh") == ( - ContainerType.PROJECT, - random_id, - StructureVersion.DIRECT_DIRECTORY, - ) - assert detect_async_dir(f"model__{random_id}__1234__abcdefgh") == ( - ContainerType.MODEL, - random_id, - StructureVersion.DIRECT_DIRECTORY, - ) - assert detect_async_dir(f"model_version__{random_id}__1234__abcdefgh") == ( - ContainerType.MODEL_VERSION, - random_id, - StructureVersion.DIRECT_DIRECTORY, - ) diff --git a/tests/unit/neptune/new/cli/utils.py b/tests/unit/neptune/new/cli/utils.py deleted file mode 100644 index 2ae5ea10a..000000000 --- a/tests/unit/neptune/new/cli/utils.py +++ /dev/null @@ -1,127 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
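The `test__split_dir_name` cases that follow spell out the directory-name grammar in one place: a bare id is a legacy run, `<type>__<id>` is the child-execution layout, and `<type>__<id>__<pid>__<key>` is the direct-directory layout. Below is a compact sketch of a parser for that grammar, with plain strings standing in for the real `ContainerType` and `StructureVersion` enums; it is a simplification of what `detect_async_dir` does, not its actual implementation.

def split_dir_name(dir_name: str):
    """Returns (container_type, container_id, structure_version) for a queue dir name."""
    parts = dir_name.split("__")
    if len(parts) == 1:
        # A bare UUID: the legacy layout only ever held runs.
        return "run", parts[0], "LEGACY"
    if len(parts) == 2:
        container_type, container_id = parts
        return container_type, container_id, "CHILD_EXECUTION_DIRECTORIES"
    if len(parts) == 4:
        container_type, container_id, _pid, _key = parts
        return container_type, container_id, "DIRECT_DIRECTORY"
    raise ValueError(f"Unrecognized directory name: {dir_name!r}")


assert split_dir_name("model__abc") == ("model", "abc", "CHILD_EXECUTION_DIRECTORIES")
assert split_dir_name("run__abc__1234__abcdefgh") == ("run", "abc", "DIRECT_DIRECTORY")

Note that `model_version` survives the split intact because the grammar separates fields with a double underscore, not a single one.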
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -import threading -from pathlib import Path -from typing import Optional - -from neptune.cli.utils import get_qualified_name -from neptune.constants import ( - ASYNC_DIRECTORY, - OFFLINE_DIRECTORY, -) -from neptune.core.components.queue.disk_queue import DiskQueue -from neptune.core.components.queue.sync_offset_file import SyncOffsetFile -from neptune.exceptions import MetadataContainerNotFound -from neptune.internal.backends.api_model import ApiExperiment -from neptune.internal.container_type import ContainerType -from tests.unit.neptune.new.utils.api_experiments_factory import ( - api_metadata_container, - api_run, -) - - -def generate_get_metadata_container(registered_containers): - def get_metadata_container(container_id, expected_container_type: ContainerType): - """This function will return runs as well as projects. Will be cleaned up in ModelRegistry""" - for exp in registered_containers: - if container_id in (str(exp.id), get_qualified_name(exp)): - return exp - - raise MetadataContainerNotFound.of_container_type( - container_type=expected_container_type, container_id=container_id - ) - - return get_metadata_container - - -def execute_operations(container_id, container_type, operations, operation_storage): - return len(operations), [] - - -def _prepare_disk_queue(*, exp_path, last_ack_version): - exp_path.mkdir(parents=True) - queue = DiskQueue( - data_path=exp_path, - to_dict=lambda x: x, - from_dict=lambda x: x, - lock=threading.RLock(), - ) - queue.put("op-0") - queue.put("op-1") - queue.put("op-2") - - SyncOffsetFile(exp_path / "last_put_version").write(3) - if last_ack_version is not None: - SyncOffsetFile(exp_path / "last_ack_version").write(last_ack_version) - - -def prepare_v2_container( - *, - container_type: ContainerType, - path: Path, - pid: int, - key: str, - last_ack_version: Optional[int], - trashed: Optional[bool] = False, -) -> ApiExperiment: - container = api_metadata_container(container_type, trashed=trashed) - - exec_path = f"{container_type.create_dir_name(container.id)}__{pid}__{key}" - directory = OFFLINE_DIRECTORY if last_ack_version is None else ASYNC_DIRECTORY - experiment_path = path / directory / exec_path - - _prepare_disk_queue(exp_path=experiment_path, last_ack_version=last_ack_version) - - return container - - -def prepare_v1_container( - *, container_type: ContainerType, path: Path, last_ack_version: Optional[int], trashed: Optional[bool] = False -) -> ApiExperiment: - is_offline = last_ack_version is None - - container = api_metadata_container(container_type, trashed=trashed) - - if is_offline: - exp_path = path / OFFLINE_DIRECTORY / f"{container.type.value}__{container.id}" - else: - execution_id = "exec-0" - exp_path = path / ASYNC_DIRECTORY / f"{container.type.value}__{container.id}" / execution_id - - _prepare_disk_queue( - exp_path=exp_path, - last_ack_version=last_ack_version, - ) - - return container - - -def prepare_v0_run(*, path: Path, last_ack_version: Optional[int]): - is_offline = last_ack_version is None - - run = api_run() - - if is_offline: - exp_path = path / OFFLINE_DIRECTORY / run.id - else: - execution_id = "exec-0" - exp_path = path / ASYNC_DIRECTORY / run.id / execution_id - _prepare_disk_queue( - exp_path=exp_path, - last_ack_version=last_ack_version, - ) - return run diff --git a/tests/unit/neptune/new/client/__init__.py b/tests/unit/neptune/new/client/__init__.py deleted file mode 100644 index b5e585d90..000000000 ---
a/tests/unit/neptune/new/client/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/client/abstract_experiment_test_mixin.py b/tests/unit/neptune/new/client/abstract_experiment_test_mixin.py deleted file mode 100644 index 1e012ce1e..000000000 --- a/tests/unit/neptune/new/client/abstract_experiment_test_mixin.py +++ /dev/null @@ -1,160 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import contextlib -import os -import time -import unittest -from abc import abstractmethod -from io import StringIO -from unittest.mock import ( - Mock, - patch, -) - -import pytest - -from neptune.exceptions import ( - MetadataInconsistency, - MissingFieldException, - NeptuneOfflineModeFetchException, - NeptuneSynchronizationAlreadyStoppedException, - TypeDoesNotSupportAttributeException, -) - - -class AbstractExperimentTestMixin: - @staticmethod - @abstractmethod - def call_init(**kwargs): - pass - - def test_incorrect_mode(self): - with self.assertRaises(ValueError): - with self.call_init(mode="srtgj"): - pass - - def test_debug_mode(self): - with self.call_init(mode="debug") as exp: - exp["some/variable"] = 13 - self.assertEqual(13, exp["some/variable"].fetch()) - self.assertNotIn(str(exp._id), os.listdir(".neptune")) - - @patch("neptune.internal.operation_processors.utils.random_key") - @patch("neptune.internal.operation_processors.utils.os.getpid") - def test_offline_mode(self, getpid_mock, random_mock): - random_mock.return_value = "test" - getpid_mock.return_value = 1234 - - with self.call_init(mode="offline") as exp: - exp["some/variable"] = 13 - with self.assertRaises(NeptuneOfflineModeFetchException): - exp["some/variable"].fetch() - - exp_dir = f"{exp.container_type.value}__{exp._id}__1234__test" - - self.assertIn(exp_dir, os.listdir(".neptune/offline")) - self.assertIn("data-1.log", os.listdir(f".neptune/offline/{exp_dir}")) - - def test_sync_mode(self): - with self.call_init(mode="sync") as exp: - exp["some/variable"] = 13 - exp["copied/variable"] = exp["some/variable"] - self.assertEqual(13, exp["some/variable"].fetch()) - self.assertEqual(13, exp["copied/variable"].fetch()) - self.assertNotIn(str(exp._id), os.listdir(".neptune")) - - def test_async_mode(self): - with patch("neptune.internal.operation_processors.utils.random_key") as random_mock: - with patch("neptune.internal.operation_processors.utils.os.getpid") as 
getpid_mock: - random_mock.return_value = "test" - getpid_mock.return_value = 1234 - - with self.call_init(mode="async", flush_period=0.5) as exp: - exp["some/variable"] = 13 - exp["copied/variable"] = exp["some/variable"] - with self.assertRaises(MetadataInconsistency): - exp["some/variable"].fetch() - exp.wait() - self.assertEqual(13, exp["some/variable"].fetch()) - self.assertEqual(13, exp["copied/variable"].fetch()) - - exp_dir = f"{exp.container_type.value}__{exp._id}__1234__test" - self.assertIn(exp_dir, os.listdir(".neptune/async")) - self.assertIn("data-1.log", os.listdir(f".neptune/async/{exp_dir}")) - - def test_async_mode_wait_on_dead(self): - with self.call_init(mode="async", flush_period=0.5) as exp: - exp._backend.execute_operations = Mock(side_effect=ValueError) - exp["some/variable"] = 13 - # wait for the process to die - time.sleep(1) - with self.assertRaises(NeptuneSynchronizationAlreadyStoppedException): - exp.wait() - - def test_async_mode_die_during_wait(self): - with self.call_init(mode="async", flush_period=1) as exp: - exp._backend.execute_operations = Mock(side_effect=ValueError) - exp["some/variable"] = 13 - with self.assertRaises(NeptuneSynchronizationAlreadyStoppedException): - exp.wait() - - @unittest.skip("NPT-12753 Flaky test") - def test_async_mode_stop_on_dead(self): - stream = StringIO() - with contextlib.redirect_stdout(stream): - with self.call_init(mode="async", flush_period=0.5) as exp: - update_freq = 1 - default_freq = exp._op_processor.STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS - try: - exp._op_processor.STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS = update_freq - exp._op_processor._backend.execute_operations = Mock(side_effect=ValueError) - exp["some/variable"] = 13 - exp.stop() - finally: - exp._op_processor.STOP_QUEUE_STATUS_UPDATE_FREQ_SECONDS = default_freq - - self.assertIn("NeptuneSynchronizationAlreadyStopped", stream.getvalue()) - - def test_missing_attribute(self): - with self.call_init(mode="debug") as exp: - with self.assertRaises(MissingFieldException): - exp["non/existing/path"].fetch() - - def test_wrong_function(self): - with self.call_init(mode="debug") as exp: - with self.assertRaises(AttributeError): - exp["non/existing/path"].foo() - - @pytest.mark.skip(reason="File functionality disabled") - def test_wrong_per_type_function(self): - with self.call_init(mode="debug") as exp: - exp["some/path"] = "foo" - with self.assertRaises(TypeDoesNotSupportAttributeException): - exp["some/path"].download() - - def test_clean_data_on_stop(self): - with self.call_init(mode="async", flush_period=0.5) as exp: - container_path = exp._op_processor.data_path - - assert os.path.exists(container_path) - - exp.stop() - - assert not os.path.exists(container_path) - - @abstractmethod - def test_read_only_mode(self): - pass diff --git a/tests/unit/neptune/new/client/abstract_tables_test.py b/tests/unit/neptune/new/client/abstract_tables_test.py deleted file mode 100644 index 0eee972b5..000000000 --- a/tests/unit/neptune/new/client/abstract_tables_test.py +++ /dev/null @@ -1,230 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
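Distilling the mode tests in this mixin before the table tests below: each init mode maps to a persistence strategy, and only the async and offline modes leave a queue directory under `.neptune`. A rough sketch of that mapping, assuming the directory layout asserted above; `queue_dir` is an illustrative helper, not Neptune API.

from pathlib import Path
from typing import Optional


def queue_dir(root: Path, mode: str, container_type: str, container_id: str, pid: int, key: str) -> Optional[Path]:
    # debug, sync and read-only keep nothing on disk; async and offline queue operations to it.
    if mode in ("debug", "sync", "read-only"):
        return None
    subdir = "async" if mode == "async" else "offline"
    return root / ".neptune" / subdir / f"{container_type}__{container_id}__{pid}__{key}"


assert queue_dir(Path("."), "debug", "run", "abc", 1234, "test") is None
assert queue_dir(Path("."), "offline", "run", "abc", 1234, "test") == Path(".neptune/offline/run__abc__1234__test")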
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import uuid -from abc import abstractmethod -from datetime import datetime -from typing import List - -import pytest -from mock import patch - -from neptune import ANONYMOUS_API_TOKEN -from neptune.api.models import ( - DateTimeField, - FieldDefinition, - FieldType, - FileField, - FileSetField, - FloatField, - FloatSeriesField, - GitCommit, - GitRefField, - ImageSeriesField, - LeaderboardEntry, - ObjectStateField, - StringField, - StringSeriesField, - StringSetField, -) -from neptune.envs import ( - API_TOKEN_ENV_NAME, - PROJECT_ENV_NAME, -) -from neptune.exceptions import MetadataInconsistency -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.table import ( - Table, - TableEntry, -) - - -@patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes", - new=lambda _, _uuid, _type: [FieldDefinition(path="test", type=FieldType.STRING)], -) -@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) -class AbstractTablesTestMixin: - expected_container_type = None - - @abstractmethod - def get_table(self, **kwargs) -> Table: - pass - - @abstractmethod - def get_table_entries(self, table) -> List[TableEntry]: - pass - - @classmethod - def setUpClass(cls) -> None: - os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - - @classmethod - def setUp(cls) -> None: - if PROJECT_ENV_NAME in os.environ: - del os.environ[PROJECT_ENV_NAME] - - @staticmethod - def build_fields_leaderboard(now: datetime): - return [ - ObjectStateField(path="run/state", value="Inactive"), - FloatField(path="float", value=12.5), - StringField(path="string", value="some text"), - DateTimeField(path="datetime", value=now), - FloatSeriesField(path="float/series", last=8.7), - StringSeriesField(path="string/series", last="last text"), - StringSetField(path="string/set", values={"a", "b"}), - GitRefField(path="git/ref", commit=GitCommit(commit_id="abcdef0123456789")), - FileField(path="file", size=0, name="file.txt", ext="txt"), - FileSetField(path="file/set", size=0), - ImageSeriesField(path="image/series", last_step=None), - ] - - @patch.object(NeptuneBackendMock, "search_leaderboard_entries") - def test_get_table_with_columns_filter(self, search_leaderboard_entries): - # when - self.get_table(columns=["datetime"]) - - # then - self.assertEqual(1, search_leaderboard_entries.call_count) - parameters = search_leaderboard_entries.call_args[1] - self.assertEqual({"sys/id", "sys/creation_time", "datetime"}, parameters.get("columns")) - - @patch.object(NeptuneBackendMock, "search_leaderboard_entries") - def test_get_table_as_pandas(self, search_leaderboard_entries): - # given - now = datetime.now() - fields = self.build_fields_leaderboard(now) - - # and - empty_entry = LeaderboardEntry(object_id=str(uuid.uuid4()), fields=[]) - filled_entry = LeaderboardEntry(object_id=str(uuid.uuid4()), fields=fields) - search_leaderboard_entries.return_value = [empty_entry, filled_entry] - - # when - df = self.get_table().to_pandas() - - # then - self.assertEqual("Inactive", df["run/state"][1]) - self.assertEqual(12.5, 
df["float"][1]) - self.assertEqual("some text", df["string"][1]) - self.assertEqual(now, df["datetime"][1]) - self.assertEqual(8.7, df["float/series"][1]) - self.assertEqual("last text", df["string/series"][1]) - self.assertEqual({"a", "b"}, set(df["string/set"][1].split(","))) - self.assertEqual("abcdef0123456789", df["git/ref"][1]) - - with self.assertRaises(KeyError): - self.assertTrue(df["file"]) - with self.assertRaises(KeyError): - self.assertTrue(df["file/set"]) - with self.assertRaises(KeyError): - self.assertTrue(df["image/series"]) - - @patch.object(NeptuneBackendMock, "search_leaderboard_entries") - def test_get_table_as_rows(self, search_leaderboard_entries): - # given - now = datetime.now() - fields = self.build_fields_leaderboard(now) - - # and - empty_entry = LeaderboardEntry(object_id=str(uuid.uuid4()), fields=[]) - filled_entry = LeaderboardEntry(object_id=str(uuid.uuid4()), fields=fields) - search_leaderboard_entries.return_value = [empty_entry, filled_entry] - - # and - # (check if using both to_rows and table generator produces the same results) - table_gen = self.get_table() - next(table_gen) # to move to the second table entry - # when - for row in (self.get_table().to_rows()[1], next(table_gen)): - # then - self.assertEqual("Inactive", row.get_attribute_value("run/state")) - self.assertEqual(12.5, row.get_attribute_value("float")) - self.assertEqual("some text", row.get_attribute_value("string")) - self.assertEqual(now, row.get_attribute_value("datetime")) - self.assertEqual(8.7, row.get_attribute_value("float/series")) - self.assertEqual("last text", row.get_attribute_value("string/series")) - self.assertEqual({"a", "b"}, row.get_attribute_value("string/set")) - self.assertEqual("abcdef0123456789", row.get_attribute_value("git/ref")) - - with self.assertRaises(MetadataInconsistency): - row.get_attribute_value("file") - with self.assertRaises(MetadataInconsistency): - row.get_attribute_value("image/series") - - @patch.object(NeptuneBackendMock, "search_leaderboard_entries") - @patch.object(NeptuneBackendMock, "download_file") - @patch.object(NeptuneBackendMock, "download_file_set") - def test_get_table_as_table_entries( - self, - download_file_set, - download_file, - search_leaderboard_entries, - ): - # given - exp_id = str(uuid.uuid4()) - now = datetime.now() - fields = self.build_fields_leaderboard(now) - - # and - search_leaderboard_entries.return_value = [LeaderboardEntry(object_id=exp_id, fields=fields)] - - # when - table_entry = self.get_table_entries(table=self.get_table())[0] - - # then - self.assertEqual("Inactive", table_entry["run/state"].get()) - self.assertEqual("Inactive", table_entry["run"]["state"].get()) - self.assertEqual(12.5, table_entry["float"].get()) - self.assertEqual("some text", table_entry["string"].get()) - self.assertEqual(now, table_entry["datetime"].get()) - self.assertEqual(8.7, table_entry["float/series"].get()) - self.assertEqual("last text", table_entry["string/series"].get()) - self.assertEqual({"a", "b"}, table_entry["string/set"].get()) - self.assertEqual("abcdef0123456789", table_entry["git/ref"].get()) - - with self.assertRaises(MetadataInconsistency): - table_entry["file"].get() - with self.assertRaises(MetadataInconsistency): - table_entry["file/set"].get() - with self.assertRaises(MetadataInconsistency): - table_entry["image/series"].get() - - table_entry["file"].download("some_directory") - download_file.assert_called_with( - container_id=exp_id, - container_type=self.expected_container_type, - path=["file"], - 
destination="some_directory", - progress_bar=None, - ) - - table_entry["file/set"].download("some_directory") - download_file_set.assert_called_with( - container_id=exp_id, - container_type=self.expected_container_type, - path=["file", "set"], - destination="some_directory", - progress_bar=None, - ) - - def test_table_limit(self): - with pytest.raises(ValueError): - self.get_table(limit=-4) - - with pytest.raises(ValueError): - self.get_table(limit=0) diff --git a/tests/unit/neptune/new/client/test_model.py b/tests/unit/neptune/new/client/test_model.py deleted file mode 100644 index 30acb6c40..000000000 --- a/tests/unit/neptune/new/client/test_model.py +++ /dev/null @@ -1,120 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import unittest - -import pytest -from mock import patch - -from neptune import ( - ANONYMOUS_API_TOKEN, - init_model, -) -from neptune.api.models import ( - FieldDefinition, - FieldType, - IntField, -) -from neptune.attributes import String -from neptune.envs import ( - API_TOKEN_ENV_NAME, - PROJECT_ENV_NAME, -) -from neptune.exceptions import ( - NeptuneUnsupportedFunctionalityException, - NeptuneWrongInitParametersException, -) -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.internal.exceptions import NeptuneException -from neptune.internal.utils.paths import path_to_str -from neptune.internal.warnings import ( - NeptuneWarning, - warned_once, -) -from tests.unit.neptune.new.client.abstract_experiment_test_mixin import AbstractExperimentTestMixin -from tests.unit.neptune.new.utils.api_experiments_factory import api_model - -AN_API_MODEL = api_model() - - -@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) -@pytest.mark.xfail(reason="Model is not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException) -class TestClientModel(AbstractExperimentTestMixin, unittest.TestCase): - @staticmethod - def call_init(**kwargs): - return init_model(key="MOD", **kwargs) - - @classmethod - def setUpClass(cls) -> None: - os.environ[PROJECT_ENV_NAME] = "organization/project" - os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - - @pytest.mark.skip( - reason=( - "By coincidence, the test is passing as " - "NeptuneUnsupportedFunctionalityException is subclass of NeptuneException" - ) - ) - def test_offline_mode(self): - with self.assertRaises(NeptuneException): - init_model(key="MOD", mode="offline") - - @patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_metadata_container", - new=lambda _, container_id, expected_container_type: AN_API_MODEL, - ) - @patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes", - new=lambda _, _uuid, _type: [FieldDefinition("some/variable", FieldType.INT)], - ) - @patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_int_attribute", - new=lambda _, _uuid, _type, _path: IntField(path=path_to_str(_path), 
value=42), - ) - @patch("neptune.internal.operation_processors.read_only_operation_processor.warn_once") - def test_read_only_mode(self, warn_once): - warned_once.clear() - with init_model(mode="read-only", with_id="whatever") as exp: - exp["some/variable"] = 13 - exp["some/other_variable"] = 11 - - warn_once.assert_called_with( - "Client in read-only mode, nothing will be saved to server.", exception=NeptuneWarning - ) - self.assertEqual(42, exp["some/variable"].fetch()) - self.assertNotIn(str(exp._id), os.listdir(".neptune")) - - @patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_metadata_container", - new=lambda _, container_id, expected_container_type: AN_API_MODEL, - ) - @patch( - "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes", - new=lambda _, _uuid, _type: [FieldDefinition("test", FieldType.STRING)], - ) - def test_resume(self): - with init_model(flush_period=0.5, with_id="whatever") as exp: - self.assertEqual(exp._id, AN_API_MODEL.id) - self.assertIsInstance(exp.get_structure()["test"], String) - - def test_wrong_parameters(self): - with self.assertRaises(NeptuneWrongInitParametersException): - init_model(with_id=None, key=None) - - def test_name_parameter(self): - with init_model(key="TRY", name="some_name") as exp: - exp.wait() - self.assertEqual(exp["sys/name"].fetch(), "some_name") diff --git a/tests/unit/neptune/new/client/test_model_tables.py b/tests/unit/neptune/new/client/test_model_tables.py deleted file mode 100644 index 00f0fb754..000000000 --- a/tests/unit/neptune/new/client/test_model_tables.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import unittest -from typing import List - -from neptune import init_project -from neptune.internal.container_type import ContainerType -from neptune.table import ( - Table, - TableEntry, -) -from tests.unit.neptune.new.client.abstract_tables_test import AbstractTablesTestMixin - - -class TestModelTables(AbstractTablesTestMixin, unittest.TestCase): - expected_container_type = ContainerType.MODEL - - def get_table(self, **kwargs) -> Table: - return init_project(project="organization/project", mode="read-only").fetch_models_table(**kwargs) - - def get_table_entries(self, table) -> List[TableEntry]: - return table.to_rows() diff --git a/tests/unit/neptune/new/client/test_model_version.py b/tests/unit/neptune/new/client/test_model_version.py deleted file mode 100644 index 14ca4404b..000000000 --- a/tests/unit/neptune/new/client/test_model_version.py +++ /dev/null @@ -1,169 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import unittest
-
-import pytest
-from mock import patch
-
-from neptune import (
-    ANONYMOUS_API_TOKEN,
-    init_model_version,
-)
-from neptune.api.models import (
-    FieldDefinition,
-    FieldType,
-    IntField,
-    StringField,
-)
-from neptune.attributes import String
-from neptune.envs import (
-    API_TOKEN_ENV_NAME,
-    PROJECT_ENV_NAME,
-)
-from neptune.exceptions import (
-    NeptuneOfflineModeChangeStageException,
-    NeptuneUnsupportedFunctionalityException,
-    NeptuneWrongInitParametersException,
-)
-from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock
-from neptune.internal.container_type import ContainerType
-from neptune.internal.exceptions import NeptuneException
-from neptune.internal.utils.paths import path_to_str
-from neptune.internal.warnings import (
-    NeptuneWarning,
-    warned_once,
-)
-from tests.unit.neptune.new.client.abstract_experiment_test_mixin import AbstractExperimentTestMixin
-from tests.unit.neptune.new.utils.api_experiments_factory import (
-    api_model,
-    api_model_version,
-)
-
-AN_API_MODEL = api_model()
-AN_API_MODEL_VERSION = api_model_version()
-
-
-@pytest.mark.xfail(reason="Model version not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-@patch(
-    "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_metadata_container",
-    new=lambda _, container_id, expected_container_type: (
-        AN_API_MODEL if expected_container_type == ContainerType.MODEL else AN_API_MODEL_VERSION
-    ),
-)
-@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-class TestClientModelVersion(AbstractExperimentTestMixin, unittest.TestCase):
-    @staticmethod
-    def call_init(**kwargs):
-        return init_model_version(model="PRO-MOD", **kwargs)
-
-    @classmethod
-    def setUpClass(cls) -> None:
-        os.environ[PROJECT_ENV_NAME] = "organization/project"
-        os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN
-
-    @pytest.mark.skip(
-        (
-            "By coincidence, the test is passing when it should not. It's caused by the fact that "
-            "NeptuneUnsupportedFunctionalityException is a subclass of NeptuneException"
-        )
-    )
-    def test_offline_mode(self):
-        with self.assertRaises(NeptuneException):
-            init_model_version(model="PRO-MOD", mode="offline")
-
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes",
-        new=lambda _, _uuid, _type: [
-            FieldDefinition("some/variable", FieldType.INT),
-            FieldDefinition("sys/model_id", FieldType.STRING),
-        ],
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_int_attribute",
-        new=lambda _, _uuid, _type, _path: IntField(path=path_to_str(_path), value=42),
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_string_attribute",
-        new=lambda _, _uuid, _type, _path: StringField(path=path_to_str(_path), value="MDL"),
-    )
-    @patch("neptune.internal.operation_processors.read_only_operation_processor.warn_once")
-    def test_read_only_mode(self, warn_once):
-        warned_once.clear()
-        with init_model_version(mode="read-only", with_id="whatever") as exp:
-            exp["some/variable"] = 13
-            exp["some/other_variable"] = 11
-
-            warn_once.assert_called_with(
-                "Client in read-only mode, nothing will be saved to server.", exception=NeptuneWarning
-            )
-            self.assertEqual(42, exp["some/variable"].fetch())
-        self.assertNotIn(str(exp._id), os.listdir(".neptune"))
-
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes",
-        new=lambda _, _uuid, _type: [
-            FieldDefinition("test", FieldType.STRING),
-            FieldDefinition("sys/model_id", FieldType.STRING),
-        ],
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_string_attribute",
-        new=lambda _, _uuid, _type, _path: StringField(path=path_to_str(_path), value="MDL"),
-    )
-    def test_resume(self):
-        with init_model_version(flush_period=0.5, with_id="whatever") as exp:
-            self.assertEqual(exp._id, AN_API_MODEL_VERSION.id)
-            self.assertIsInstance(exp.get_structure()["test"], String)
-
-    def test_sync_mode(self):
-        AbstractExperimentTestMixin.test_sync_mode(self)
-
-    def test_async_mode(self):
-        AbstractExperimentTestMixin.test_async_mode(self)
-
-    def test_wrong_parameters(self):
-        with self.assertRaises(NeptuneWrongInitParametersException):
-            with init_model_version(with_id=None, model=None):
-                pass
-
-    def test_change_stage(self):
-        with self.call_init() as exp:
-            exp.change_stage(stage="production")
-
-        self.assertEqual("production", exp["sys/stage"].fetch())
-
-        with self.assertRaises(ValueError):
-            exp.change_stage(stage="wrong_stage")
-
-    @pytest.mark.skip(
-        (
-            "By coincidence, the test is passing when it should not. It's caused by the fact that "
-            "NeptuneUnsupportedFunctionalityException is a subclass of NeptuneException"
-        )
-    )
-    def test_change_stage_of_offline_model_version(self):
-        # this test will be required when we decide that creating model versions
-        # in offline mode is allowed
-        with self.assertRaises(NeptuneException):
-            with self.call_init(mode="offline") as exp:
-                with self.assertRaises(NeptuneOfflineModeChangeStageException):
-                    exp.change_stage(stage="production")
-
-    def test_name_parameter(self):
-        with self.call_init(name="some_name") as exp:
-            exp.wait()
-            self.assertEqual(exp["sys/name"].fetch(), "some_name")
diff --git a/tests/unit/neptune/new/client/test_model_version_tables.py b/tests/unit/neptune/new/client/test_model_version_tables.py
deleted file mode 100644
index a8b038215..000000000
--- a/tests/unit/neptune/new/client/test_model_version_tables.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-from typing import List
-
-import pytest
-
-from neptune import init_model
-from neptune.exceptions import NeptuneUnsupportedFunctionalityException
-from neptune.internal.container_type import ContainerType
-from neptune.table import (
-    Table,
-    TableEntry,
-)
-from tests.unit.neptune.new.client.abstract_tables_test import AbstractTablesTestMixin
-
-
-@pytest.mark.xfail(reason="Model is not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-class TestModelVersionTables(AbstractTablesTestMixin, unittest.TestCase):
-    expected_container_type = ContainerType.MODEL_VERSION
-
-    def get_table(self, **kwargs) -> Table:
-        return init_model(
-            with_id="organization/project",
-            project="PRO-MOD",
-            mode="read-only",
-        ).fetch_model_versions_table(**kwargs)
-
-    def get_table_entries(self, table) -> List[TableEntry]:
-        return table.to_rows()
diff --git a/tests/unit/neptune/new/client/test_project.py b/tests/unit/neptune/new/client/test_project.py
deleted file mode 100644
index e73284c8c..000000000
--- a/tests/unit/neptune/new/client/test_project.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import unittest
-
-import pytest
-from mock import patch
-
-from neptune import (
-    ANONYMOUS_API_TOKEN,
-    init_project,
-)
-from neptune.api.models import (
-    FieldDefinition,
-    FieldType,
-    IntField,
-)
-from neptune.envs import (
-    API_TOKEN_ENV_NAME,
-    PROJECT_ENV_NAME,
-)
-from neptune.exceptions import (
-    NeptuneMissingProjectNameException,
-    NeptuneUnsupportedFunctionalityException,
-)
-from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock
-from neptune.internal.exceptions import NeptuneException
-from neptune.internal.utils.paths import path_to_str
-from neptune.internal.warnings import (
-    NeptuneWarning,
-    warned_once,
-)
-from neptune.objects.utils import prepare_nql_query
-from tests.unit.neptune.new.client.abstract_experiment_test_mixin import AbstractExperimentTestMixin
-
-
-@patch(
-    "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes",
-    new=lambda _, _uuid, _type: [FieldDefinition("test", FieldType.STRING)],
-)
-@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-class TestClientProject(AbstractExperimentTestMixin, unittest.TestCase):
-    PROJECT_NAME = "organization/project"
-
-    @staticmethod
-    def call_init(**kwargs):
-        return init_project(project=TestClientProject.PROJECT_NAME, **kwargs)
-
-    @classmethod
-    def setUpClass(cls) -> None:
-        os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN
-
-    @classmethod
-    def setUp(cls) -> None:
-        if PROJECT_ENV_NAME in os.environ:
-            del os.environ[PROJECT_ENV_NAME]
-
-    def test_offline_mode(self):
-        with self.assertRaises(NeptuneException):
-            with init_project(project=self.PROJECT_NAME, mode="offline"):
-                pass
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_no_project_name(self):
-        with self.assertRaises(NeptuneMissingProjectNameException):
-            with init_project(mode="async"):
-                pass
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_inexistent_project(self):
-        with self.assertRaises(NeptuneMissingProjectNameException):
-            with init_project(mode="async"):
-                pass
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_project_name_env_var(self):
-        os.environ[PROJECT_ENV_NAME] = self.PROJECT_NAME
-
-        with init_project(mode="sync") as project:
-            project["some/variable"] = 13
-            self.assertEqual(13, project["some/variable"].fetch())
-
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_int_attribute",
-        new=lambda _, _uuid, _type, _path: IntField(value=42, path=path_to_str(_path)),
-    )
-    @patch("neptune.internal.operation_processors.read_only_operation_processor.warn_once")
-    def test_read_only_mode(self, warn_once):
-        warned_once.clear()
-        with init_project(project=self.PROJECT_NAME, mode="read-only") as project:
-            project["some/variable"] = 13
-            project["some/other_variable"] = 11
-
-            warn_once.assert_called_with(
-                "Client in read-only mode, nothing will be saved to server.", exception=NeptuneWarning
-            )
-
-            self.assertEqual(42, project["some/variable"].fetch())
-        self.assertNotIn(str(project._id), os.listdir(".neptune"))
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_async_mode(self):
-        super().test_async_mode()
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_sync_mode(self):
-        super().test_sync_mode()
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_async_mode_stop_on_dead(self):
-        super().test_async_mode_stop_on_dead()
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_async_mode_wait_on_dead(self):
-        super().test_async_mode_wait_on_dead()
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_async_mode_die_during_wait(self):
-        super().test_async_mode_die_during_wait()
-
-    @pytest.mark.xfail(reason="Project not supported", strict=True, raises=NeptuneUnsupportedFunctionalityException)
-    def test_clean_data_on_stop(self):
-        super().test_clean_data_on_stop()
-
-
-def test_prepare_nql_query():
-    query = prepare_nql_query(
-        ["id1", "id2"],
-        ["active"],
-        ["owner1", "owner2"],
-        ["tag1", "tag2"],
-        trashed=True,
-    )
-    assert len(query.items) == 5
-
-    query = prepare_nql_query(
-        ["id1", "id2"],
-        ["active"],
-        ["owner1", "owner2"],
-        ["tag1", "tag2"],
-        trashed=None,
-    )
-    assert len(query.items) == 4
-
-    query = prepare_nql_query(
-        None,
-        None,
-        None,
-        None,
-        trashed=None,
-    )
-    assert len(query.items) == 0
diff --git a/tests/unit/neptune/new/client/test_run.py b/tests/unit/neptune/new/client/test_run.py
deleted file mode 100644
index 1c6bc9f8f..000000000
--- a/tests/unit/neptune/new/client/test_run.py
+++ /dev/null
@@ -1,306 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import itertools
-import os
-import unittest
-
-import pytest
-from mock import (
-    mock_open,
-    patch,
-)
-
-from neptune import (
-    ANONYMOUS_API_TOKEN,
-    init_run,
-)
-from neptune.api.models import (
-    FieldDefinition,
-    FieldType,
-    IntField,
-)
-from neptune.attributes.atoms import String
-from neptune.envs import (
-    API_TOKEN_ENV_NAME,
-    PROJECT_ENV_NAME,
-)
-from neptune.exceptions import MissingFieldException
-from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock
-from neptune.internal.utils.paths import path_to_str
-from neptune.internal.utils.utils import IS_WINDOWS
-from neptune.internal.warnings import (
-    NeptuneWarning,
-    warned_once,
-)
-from neptune.types import GitRef
-from tests.unit.neptune.new.client.abstract_experiment_test_mixin import AbstractExperimentTestMixin
-from tests.unit.neptune.new.utils.api_experiments_factory import api_run
-
-AN_API_RUN = api_run()
-
-
-@patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-class TestClientRun(AbstractExperimentTestMixin, unittest.TestCase):
-    @staticmethod
-    def call_init(**kwargs):
-        return init_run(**kwargs)
-
-    @classmethod
-    def setUpClass(cls) -> None:
-        os.environ[PROJECT_ENV_NAME] = "organization/project"
-        os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN
-
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_metadata_container",
-        new=lambda _, container_id, expected_container_type: AN_API_RUN,
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes",
-        new=lambda _, _uuid, _type: [FieldDefinition("some/variable", FieldType.INT)],
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_int_attribute",
-        new=lambda _, _uuid, _type, _path: IntField(value=42, path=path_to_str(_path)),
-    )
-    @patch("neptune.internal.operation_processors.read_only_operation_processor.warn_once")
-    def test_read_only_mode(self, warn_once):
-        warned_once.clear()
-        with init_run(mode="read-only", with_id="whatever") as exp:
-            exp["some/variable"] = 13
-            exp["some/other_variable"] = 11
-
-            warn_once.assert_called_with(
-                "Client in read-only mode, nothing will be saved to server.", exception=NeptuneWarning
-            )
-            self.assertEqual(42, exp["some/variable"].fetch())
-        self.assertNotIn(str(exp._id), os.listdir(".neptune"))
-
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_metadata_container",
-        new=lambda _, container_id, expected_container_type: AN_API_RUN,
-    )
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.get_attributes",
-        new=lambda _, _uuid, _type: [FieldDefinition("test", FieldType.STRING)],
-    )
-    def test_resume(self):
-        with init_run(flush_period=0.5, with_id="whatever") as exp:
-            self.assertEqual(exp._id, AN_API_RUN.id)
-            self.assertIsInstance(exp.get_structure()["test"], String)
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.internal.utils.source_code.get_path_executed_script", lambda: "main.py")
-    @patch("neptune.objects.run.os.path.isfile", new=lambda file: "." in file)
-    @patch(
-        "neptune.internal.utils.glob",
-        new=lambda path, recursive=False: [path.replace("*", "file.txt")],
-    )
-    @patch(
-        "neptune.internal.utils.os.path.abspath",
-        new=lambda path: os.path.normpath(os.path.join("/home/user/main_dir", path)),
-    )
-    @unittest.skipIf(IS_WINDOWS, "Linux/Mac test")
-    @patch("neptune.core.components.operation_storage.os.listdir", new=lambda path: [])
-    @patch("neptune.core.components.metadata_file.open", mock_open())
-    def test_entrypoint(self):
-        with init_run(mode="debug") as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "main.py")
-
-        with init_run(mode="debug", source_files=[]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "main.py")
-
-        with init_run(mode="debug", source_files=["../*"]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "main_dir/main.py")
-
-        with init_run(mode="debug", source_files=["internal/*"]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "../main.py")
-
-        with init_run(mode="debug", source_files=["../other_dir/*"]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "../main_dir/main.py")
-
-    @pytest.mark.skip(reason="File functionality disabled")
-    @patch("neptune.vendor.lib_programname.sys.argv", ["main.py"])
-    @patch("neptune.internal.utils.source_code.is_ipython", new=lambda: True)
-    def test_entrypoint_in_interactive_python(self):
-        with init_run(mode="debug") as exp:
-            with self.assertRaises(MissingFieldException):
-                exp["source_code/entrypoint"].fetch()
-
-        with init_run(mode="debug", source_files=[]) as exp:
-            with self.assertRaises(MissingFieldException):
-                exp["source_code/entrypoint"].fetch()
-
-        with init_run(mode="debug", source_files=["../*"]) as exp:
-            with self.assertRaises(MissingFieldException):
-                exp["source_code/entrypoint"].fetch()
-
-        with init_run(mode="debug", source_files=["internal/*"]) as exp:
-            with self.assertRaises(MissingFieldException):
-                exp["source_code/entrypoint"].fetch()
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.in_interactive", new=lambda: True)
-    @patch("neptune.objects.run.TracebackJob")
-    @patch("neptune.objects.run.HardwareMetricReportingJob")
-    @patch("neptune.objects.run.StderrCaptureBackgroundJob")
-    @patch("neptune.objects.run.StdoutCaptureBackgroundJob")
-    def test_monitoring_disabled_in_interactive_python(self, stdout_job, stderr_job, hardware_job, traceback_job):
-        with init_run(mode="debug", monitoring_namespace="monitoring"):
-            assert not stdout_job.called
-            assert not stderr_job.called
-            assert not hardware_job.called
-            traceback_job.assert_called_once_with(path="monitoring/traceback", fail_on_exception=True)
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.in_interactive", new=lambda: False)
-    @patch("neptune.objects.run.TracebackJob")
-    @patch("neptune.objects.run.HardwareMetricReportingJob")
-    @patch("neptune.objects.run.StderrCaptureBackgroundJob")
-    @patch("neptune.objects.run.StdoutCaptureBackgroundJob")
-    def test_monitoring_enabled_in_non_interactive_python(self, stdout_job, stderr_job, hardware_job, traceback_job):
-        with init_run(mode="debug", monitoring_namespace="monitoring"):
-            stdout_job.assert_called_once_with(attribute_name="monitoring/stdout")
-            stderr_job.assert_called_once_with(attribute_name="monitoring/stderr")
-            hardware_job.assert_called_once_with(attribute_namespace="monitoring")
-            traceback_job.assert_called_once_with(path="monitoring/traceback", fail_on_exception=True)
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.in_interactive", new=lambda: True)
-    @patch("neptune.objects.run.TracebackJob")
-    @patch("neptune.objects.run.HardwareMetricReportingJob")
-    @patch("neptune.objects.run.StderrCaptureBackgroundJob")
-    @patch("neptune.objects.run.StdoutCaptureBackgroundJob")
-    def test_monitoring_in_interactive_explicitly_enabled(self, stdout_job, stderr_job, hardware_job, traceback_job):
-        with init_run(
-            mode="debug",
-            monitoring_namespace="monitoring",
-            capture_stdout=True,
-            capture_stderr=True,
-            capture_hardware_metrics=True,
-        ):
-            stdout_job.assert_called_once_with(attribute_name="monitoring/stdout")
-            stderr_job.assert_called_once_with(attribute_name="monitoring/stderr")
-            hardware_job.assert_called_once_with(attribute_namespace="monitoring")
-            traceback_job.assert_called_once_with(path="monitoring/traceback", fail_on_exception=True)
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.internal.utils.source_code.get_path_executed_script", lambda: "main.py")
-    @patch("neptune.internal.utils.source_code.get_common_root", new=lambda _: None)
-    @patch("neptune.objects.run.os.path.isfile", new=lambda file: "." in file)
-    @patch(
-        "neptune.internal.utils.glob",
-        new=lambda path, recursive=False: [path.replace("*", "file.txt")],
-    )
-    @patch(
-        "neptune.internal.utils.os.path.abspath",
-        new=lambda path: os.path.normpath(os.path.join("/home/user/main_dir", path)),
-    )
-    @patch("neptune.core.components.operation_storage.os.listdir", new=lambda path: [])
-    @patch("neptune.core.components.metadata_file.open", mock_open())
-    def test_entrypoint_without_common_root(self):
-        with init_run(mode="debug", source_files=["../*"]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "/home/user/main_dir/main.py")
-
-        with init_run(mode="debug", source_files=["internal/*"]) as exp:
-            self.assertEqual(exp["source_code/entrypoint"].fetch(), "/home/user/main_dir/main.py")
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.generate_hash", lambda *vals, length: "some_hash")
-    @patch("neptune.objects.run.TracebackJob")
-    @patch("neptune.objects.run.HardwareMetricReportingJob")
-    @patch("neptune.objects.run.StderrCaptureBackgroundJob")
-    @patch("neptune.objects.run.StdoutCaptureBackgroundJob")
-    def test_monitoring_namespace_based_on_hash(self, stdout_job, stderr_job, hardware_job, traceback_job):
-        with init_run(mode="debug"):
-            stdout_job.assert_called_once_with(attribute_name="monitoring/some_hash/stdout")
-            stderr_job.assert_called_once_with(attribute_name="monitoring/some_hash/stderr")
-            hardware_job.assert_called_once_with(attribute_namespace="monitoring/some_hash")
-            traceback_job.assert_called_once_with(path="monitoring/some_hash/traceback", fail_on_exception=True)
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.generate_hash", lambda *vals, length: "some_hash")
-    @patch("neptune.objects.run.get_hostname", lambda *vals: "localhost")
-    @patch("neptune.objects.run.os.getpid", lambda *vals: 1234)
-    @patch("neptune.objects.run.threading.get_ident", lambda: 56789)
-    def test_that_hostname_and_process_info_were_logged(self):
-        with init_run(mode="debug") as exp:
-            assert exp["monitoring/some_hash/hostname"].fetch() == "localhost"
-            assert exp["monitoring/some_hash/pid"].fetch() == "1234"
-            assert exp["monitoring/some_hash/tid"].fetch() == "56789"
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.internal.utils.dependency_tracking.InferDependenciesStrategy.log_dependencies")
-    def test_infer_dependency_strategy_called(self, mock_infer_method):
-        with init_run(mode="debug", dependencies="infer"):
-            mock_infer_method.assert_called_once()
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.internal.utils.dependency_tracking.FileDependenciesStrategy.log_dependencies")
-    def test_file_dependency_strategy_called(self, mock_file_method):
-        with init_run(mode="debug", dependencies="some_file_path.txt"):
-            mock_file_method.assert_called_once()
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.track_uncommitted_changes")
-    def test_track_uncommitted_changes_called_given_default_git_ref(self, mock_track_changes):
-        with init_run(mode="debug"):
-            mock_track_changes.assert_called_once()
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    @patch("neptune.objects.run.track_uncommitted_changes")
-    def test_track_uncommitted_changes_called(self, mock_track_changes):
-        git_ref = GitRef()
-        with init_run(mode="debug", git_ref=git_ref) as run:
-            mock_track_changes.assert_called_once_with(
-                git_ref=git_ref,
-                run=run,
-            )
-
-        mock_track_changes.reset_mock()
-
-        with init_run(mode="debug", git_ref=True):
-            mock_track_changes.assert_called_once()
-
-    @patch("neptune.internal.utils.git.get_diff")
-    def test_track_uncommitted_changes_not_called_given_git_ref_disabled(self, mock_get_diff):
-        with init_run(mode="debug", git_ref=GitRef.DISABLED):
-            mock_get_diff.assert_not_called()
-
-        with init_run(mode="debug", git_ref=False):
-            mock_get_diff.assert_not_called()
-
-    def test_monitoring_namespace_not_created_if_no_monitoring_enabled(self):
-        with init_run(
-            mode="debug",
-            capture_traceback=False,
-            capture_stdout=False,
-            capture_stderr=False,
-            capture_hardware_metrics=False,
-        ) as run:
-            assert not run.exists("monitoring")
-
-    @pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-    def test_monitoring_namespace_created_if_any_flag_enabled(self):
-        for perm in set(itertools.permutations([True, False, False, False])):
-            ct, cso, cse, chm = perm
-            with init_run(
-                mode="debug",
-                capture_traceback=ct,
-                capture_stdout=cso,
-                capture_stderr=cse,
-                capture_hardware_metrics=chm,
-            ) as run:
-                assert run.exists("monitoring")
diff --git a/tests/unit/neptune/new/client/test_run_tables.py b/tests/unit/neptune/new/client/test_run_tables.py
deleted file mode 100644
index d1fe07f5f..000000000
--- a/tests/unit/neptune/new/client/test_run_tables.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#
-# Copyright (c) 2022, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import unittest
-from datetime import datetime
-from typing import List
-
-from mock import patch
-
-from neptune import init_project
-from neptune.api.models import (
-    DateTimeField,
-    LeaderboardEntry,
-)
-from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock
-from neptune.internal.container_type import ContainerType
-from neptune.table import (
-    Table,
-    TableEntry,
-)
-from tests.unit.neptune.new.client.abstract_tables_test import AbstractTablesTestMixin
-
-
-class TestRunTables(AbstractTablesTestMixin, unittest.TestCase):
-    expected_container_type = ContainerType.RUN
-
-    def get_table(self, **kwargs) -> Table:
-        return init_project(project="organization/project", mode="read-only").fetch_runs_table(**kwargs)
-
-    def get_table_entries(self, table) -> List[TableEntry]:
-        return table.to_rows()
-
-    @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-    def test_fetch_runs_table_is_case_insensitive(self):
-        states = ["active", "inactive", "Active", "Inactive", "aCTive", "INacTiVe"]
-        for state in states:
-            with self.subTest(state):
-                try:
-                    self.get_table(state=state)
-                except ValueError as e:
-                    self.fail(e)
-
-    @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-    def test_fetch_runs_table_raises_correct_exception_for_incorrect_states(self):
-        for incorrect_state in ["idle", "running", "some_arbitrary_state"]:
-            with self.subTest(incorrect_state):
-                with self.assertRaises(ValueError):
-                    self.get_table(state=incorrect_state)
-
-    @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock)
-    @patch(
-        "neptune.internal.backends.neptune_backend_mock.NeptuneBackendMock.search_leaderboard_entries",
-        new=lambda *args, **kwargs: [
-            LeaderboardEntry(
-                object_id="123",
-                fields=[
-                    DateTimeField(
-                        path="sys/creation_time",
-                        value=datetime(2024, 2, 5, 20, 37, 40, 915000),
-                    )
-                ],
-            )
-        ],
-    )
-    def test_creation_time_returned_as_datetime(self):
-        table = self.get_table()
-        val = table.to_rows()[0].get_attribute_value("sys/creation_time")
-        assert val == datetime(2024, 2, 5, 20, 37, 40, 915000)
diff --git a/tests/unit/neptune/new/core/__init__.py b/tests/unit/neptune/new/core/__init__.py
deleted file mode 100644
index 665b8500e..000000000
--- a/tests/unit/neptune/new/core/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2024, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/core/components/__init__.py b/tests/unit/neptune/new/core/components/__init__.py
deleted file mode 100644
index 665b8500e..000000000
--- a/tests/unit/neptune/new/core/components/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2024, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/core/components/queue/__init__.py b/tests/unit/neptune/new/core/components/queue/__init__.py
deleted file mode 100644
index 665b8500e..000000000
--- a/tests/unit/neptune/new/core/components/queue/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2024, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/core/components/queue/test_disk_queue.py b/tests/unit/neptune/new/core/components/queue/test_disk_queue.py
deleted file mode 100644
index f22969187..000000000
--- a/tests/unit/neptune/new/core/components/queue/test_disk_queue.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#
-# Copyright (c) 2024, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import random
-import threading
-from dataclasses import dataclass
-from glob import glob
-from pathlib import Path
-from tempfile import TemporaryDirectory
-from typing import Optional
-
-from mock import patch
-from pytest import fixture
-
-from neptune.core.components.queue.disk_queue import (
-    DiskQueue,
-    QueueElement,
-)
-
-
-def test_put():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-        ) as queue:
-            # given
-            obj = Obj(5, "test")
-            queue.put(obj)
-
-            # when
-            queue.flush()
-
-            # then
-            assert get_queue_element(obj, 1, 1234) == queue.get()
-
-
-def test_multiple_files():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=300,
-        ) as queue:
-            # given
-            for i in range(1, 101):
-                obj = Obj(i, str(i))
-                queue.put(obj)
-
-            # when
-            queue.flush()
-
-            # then
-            for i in range(1, 101):
-                obj = Obj(i, str(i))
-                assert get_queue_element(obj, i, 1234 + i - 1) == queue.get()
-
-            # and
-            assert queue._read_file_version > 90
-            assert queue._write_file_version > 90
-            assert len(glob(data_path + "/data-*.log")) > 10
-
-
-def test_get_batch():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=100,
-        ) as queue:
-            # given
-            for i in range(1, 91):
-                obj = Obj(i, str(i))
-                queue.put(obj)
-
-            # when
-            queue.flush()
-
-            # then
-            assert [get_queue_element(Obj(i, str(i)), i, 1234 + i - 1) for i in range(1, 26)] == queue.get_batch(25)
-            assert [get_queue_element(Obj(i, str(i)), i, 1234 + i - 1) for i in range(26, 51)] == queue.get_batch(25)
-            assert [get_queue_element(Obj(i, str(i)), i, 1234 + i - 1) for i in range(51, 76)] == queue.get_batch(25)
-            assert [get_queue_element(Obj(i, str(i)), i, 1234 + i - 1) for i in range(76, 91)] == queue.get_batch(25)
-
-
-def test_batch_limit():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=100,
-            max_batch_size_bytes=get_obj_size_bytes(Obj(1, "1"), 1, 1234) * 3,
-        ) as queue:
-            # given
-            for i in range(5):
-                obj = Obj(i, str(i))
-                queue.put(obj)
-
-            # when
-            queue.flush()
-
-            # then
-            assert [get_queue_element(Obj(i, str(i)), i + 1, 1234 + i) for i in range(3)] == queue.get_batch(5)
-            assert [get_queue_element(Obj(i, str(i)), i + 1, 1234 + i) for i in range(3, 5)] == queue.get_batch(2)
-
-
-def test_resuming_queue():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=999,
-        ) as queue:
-            # given
-            for i in range(1, 501):
-                obj = Obj(i, str(i))
-                queue.put(obj)
-
-            # when
-            queue.flush()
-
-            # and
-            version = queue.get_batch(random.randrange(300, 400))[-1].ver
-            version_to_ack = version - random.randrange(100, 200)
-            queue.ack(version_to_ack)
-
-            # then
-            assert queue._read_file_version > 100
-            assert queue._write_file_version > 450
-
-            # and
-            data_files = glob(data_path + "/data-*.log")
-            data_files_versions = [int(file[len(data_path + "/data-") : -len(".log")]) for file in data_files]
-
-            assert len(data_files) > 10
-            assert 1 == len([ver for ver in data_files_versions if ver <= version_to_ack])
-
-        # Resume queue
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=200,
-        ) as queue:
-            # then
-            for i in range(version_to_ack + 1, 501):
-                assert get_queue_element(Obj(i, str(i)), i, 1234 + i - 1) == queue.get()
-
-
-def test_ack():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=999,
-        ) as queue:
-            # given
-            for i in range(5):
-                queue.put(Obj(i, str(i)))
-
-            # when
-            queue.flush()
-
-            # and
-            queue.ack(3)
-
-            # then
-            assert get_queue_element(Obj(3, "3"), 4, 1234 + 3) == queue.get()
-            assert get_queue_element(Obj(4, "4"), 5, 1234 + 4) == queue.get()
-
-
-def test_cleaning_up():
-    with TemporaryDirectory() as data_path:
-        with DiskQueue[Obj](
-            data_path=Path(data_path),
-            to_dict=serializer,
-            from_dict=deserializer,
-            lock=threading.RLock(),
-            max_file_size=999,
-        ) as queue:
-            # given
-            for i in range(5):
-                queue.put(Obj(i, str(i)))
-
-            # when
-            queue.flush()
-
-            # and
-            queue.ack(5)
-
-            # then
-            assert 0 == queue.size()
-            assert queue.is_empty()
-
-            assert list(Path(data_path).glob("*")) == []
-
-
-@dataclass
-class Obj:
-    num: int
-    txt: str
-
-
-def get_obj_size_bytes(obj, version, at: Optional[int] = None) -> int:
-    return len(json.dumps({"obj": obj.__dict__, "version": version, "at": at}))
-
-
-def get_queue_element(obj, version, at: Optional[int] = None) -> QueueElement[Obj]:
-    obj_size = len(json.dumps({"obj": obj.__dict__, "version": version, "at": at}))
-    return QueueElement(obj, version, obj_size, at)
-
-
-def serializer(obj: "Obj") -> dict:
-    return obj.__dict__
-
-
-def deserializer(obj: dict) -> "Obj":
-    return Obj(**obj)
-
-
-def version_getter(obj: "Obj") -> int:
-    return obj.num
-
-
-@fixture(autouse=True, scope="function")
-def mock_time():
-    with patch("neptune.core.components.queue.disk_queue.time") as time_mock:
-        time_mock.side_effect = list(range(1234, 1234 + 1000))
-        yield time_mock
diff --git a/tests/unit/neptune/new/core/components/queue/test_json_file_splitter.py b/tests/unit/neptune/new/core/components/queue/test_json_file_splitter.py
deleted file mode 100644
index 7a812119a..000000000
--- a/tests/unit/neptune/new/core/components/queue/test_json_file_splitter.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#
-# Copyright (c) 2024, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from neptune.core.components.queue.json_file_splitter import JsonFileSplitter
-from tests.unit.neptune.new.utils.file_helpers import create_file
-
-
-def test_simple_file():
-    content = """
-    {
-        "a": 5,
-        "b": "text"
-    }
-    {
-        "a": 13
-    }
-    {}
-    """.lstrip()
-
-    with create_file(content) as filename:
-        with JsonFileSplitter(filename) as splitter:
-            assert splitter.get() == {"a": 5, "b": "text"}
-            assert splitter.get() == {"a": 13}
-            assert splitter.get() == {}
-            assert splitter.get() is None
-
-
-def test_append():
-    content1 = """
-    {
-        "a": 5,
-        "b": "text"
-    }
-    {
-        "a": 13
-    }"""
-
-    content2 = """
-    {
-        "q": 555,
-        "r": "something"
-    }
-    {
-        "a": {
-            "b": [1, 2, 3]
-        }
-    }
-    {}"""
-
-    with create_file(content1) as filename, open(filename, "a") as fp:
-        with JsonFileSplitter(filename) as splitter:
-            assert splitter.get() == {"a": 5, "b": "text"}
-            assert splitter.get() == {"a": 13}
-            assert splitter.get() is None
-
-            fp.write(content2)
-            fp.flush()
-
-            assert splitter.get() == {"q": 555, "r": "something"}
-            assert splitter.get() == {"a": {"b": [1, 2, 3]}}
-            assert splitter.get() == {}
-            assert splitter.get() is None
-
-
-def test_append_cut_json():
-    content1 = """
-    {
-        "a": 5,
-        "b": "text"
-    }
-    {
-        "a": 1"""
-
-    content2 = """55,
-        "r": "something"
-    }
-    {
-        "a": {
-            "b": [1, 2, 3]
-        }
-    }"""
-
-    with create_file(content1) as filename, open(filename, "a") as fp:
-        with JsonFileSplitter(filename) as splitter:
-            assert splitter.get() == {"a": 5, "b": "text"}
-            assert splitter.get() is None
-
-            fp.write(content2)
-            fp.flush()
-
-            assert splitter.get() == {"a": 155, "r": "something"}
-            assert splitter.get() == {"a": {"b": [1, 2, 3]}}
-            assert splitter.get() is None
-
-
-def test_big_json():
-    content = """
-    {
-        "a": 5,
-        "b": "text"
-    }
-    {
-        "a": "%s",
-        "b": "%s"
-    }
-    {}
-    """.lstrip() % (
-        "x" * JsonFileSplitter.BUFFER_SIZE * 2,
-        "y" * JsonFileSplitter.BUFFER_SIZE * 2,
-    )
-
-    with create_file(content) as filename:
-        with JsonFileSplitter(filename) as splitter:
-            assert splitter.get() == {"a": 5, "b": "text"}
-            assert splitter.get() == {
-                "a": "x" * JsonFileSplitter.BUFFER_SIZE * 2,
-                "b": "y" * JsonFileSplitter.BUFFER_SIZE * 2,
-            }
-            assert splitter.get() == {}
-            assert splitter.get() is None
-
-
-def test_data_size():
-    object1 = """{
-        "a": 5,
-        "b": "text"
-    }"""
-    object2 = """{
-        "a": 155,
-        "r": "something"
-    }"""
-    object3 = """{
-        "a": {
-            "b": [1, 2, 3]
-        }
-    }"""
-    content1 = """
-    {
-        "a": 5,
-        "b": "text"
-    }
-    {
-        "a": 1"""
-
-    content2 = """55,
-        "r": "something"
-    }
-    {
-        "a": {
-            "b": [1, 2, 3]
-        }
-    }"""
-
-    with create_file(content1) as filename, open(filename, "a") as fp:
-        with JsonFileSplitter(filename) as splitter:
-            assert splitter.get_with_size() == ({"a": 5, "b": "text"}, len(object1))
-            assert splitter.get_with_size()[0] is None
-
-            fp.write(content2)
-            fp.flush()
-
-            assert splitter.get_with_size() == ({"a": 155, "r": "something"}, len(object2))
-            assert splitter.get_with_size() == ({"a": {"b": [1, 2, 3]}}, len(object3))
-            assert splitter.get_with_size()[0] is None
diff --git a/tests/unit/neptune/new/core/components/test_metadata_file.py b/tests/unit/neptune/new/core/components/test_metadata_file.py
deleted file mode 100644
index d95bfa667..000000000
--- a/tests/unit/neptune/new/core/components/test_metadata_file.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#
-# Copyright (c) 2023, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from pathlib import Path
-
-from mock import (
-    MagicMock,
-    mock_open,
-    patch,
-)
-
-from neptune.core.components.metadata_file import MetadataFile
-
-sample_content = """
-{
-    "version": 5,
-    "dependencies": [
-        "a==1.0",
-        "b==2.0"
-    ]
-}""".lstrip()
-
-
-@patch("builtins.open", new_callable=mock_open)
-def test_saving(mock_file):
-    # given
-    resolved_path = MagicMock(
-        spec=Path,
-        exists=lambda: False,
-    )
-    file_path = MagicMock(spec=Path, resolve=lambda strict: resolved_path)
-    data_path = MagicMock(spec=Path, __truediv__=lambda self, key: file_path)
-
-    # when
-    with MetadataFile(data_path=data_path) as metadata:
-        # and
-        metadata["version"] = 5
-        metadata["dependencies"] = ["a==1.0", "b==2.0"]
-
-        # and
-        metadata.flush()
-
-    # then
-    mock_file.assert_called_with(resolved_path, "w")
-
-    # and - concatenate all written content
-    write_calls = mock_file().write.call_args_list
-    written_content = "".join(call[0][0] for call in write_calls)
-    assert written_content == sample_content
-
-
-@patch("builtins.open", new_callable=mock_open, read_data=sample_content)
-def test_loading_existing_state(mock_file):
-    # given
-    resolved_path = MagicMock(spec=Path, exists=lambda: True)
-    file_path = MagicMock(spec=Path, resolve=lambda strict: resolved_path)
-    data_path = MagicMock(spec=Path, __truediv__=lambda self, key: file_path)
-
-    # when
-    with MetadataFile(data_path=data_path) as metadata:
-        # then
-        mock_file.assert_called_with(resolved_path, "r")
-
-        # and
-        assert metadata["version"] == 5
-        assert metadata["dependencies"] == ["a==1.0", "b==2.0"]
-
-
-@patch("os.remove")
-@patch("builtins.open", MagicMock())
-def test_cleaning(remove):
-    # given
-    resolved_path = MagicMock(
-        spec=Path,
-        exists=lambda: False,
-    )
-    file_path = MagicMock(spec=Path, resolve=lambda strict: resolved_path)
-    data_path = MagicMock(spec=Path, __truediv__=lambda self, key: file_path)
-
-    # when
-    with MetadataFile(data_path=data_path) as metadata:
-        # when
-        metadata.cleanup()
-
-        # then
-        remove.assert_called_with(resolved_path)
-
-
-@patch("builtins.open", new_callable=mock_open)
-def test_initial_metadata(mock_file):
-    # given
-    resolved_path = MagicMock(
-        spec=Path,
-        exists=lambda: False,
-    )
-    file_path = MagicMock(spec=Path, resolve=lambda strict: resolved_path)
-    data_path = MagicMock(spec=Path, __truediv__=lambda self, key: file_path)
-
-    # when
-    with MetadataFile(data_path=data_path, metadata={"version": 5, "dependencies": ["a==1.0", "b==2.0"]}):
-        # then
-        mock_file.assert_called_with(resolved_path, "w")
-
-    # and - concatenate all written content
-    write_calls = mock_file().write.call_args_list
-    written_content = "".join(call[0][0] for call in write_calls)
-    assert written_content == sample_content
diff --git a/tests/unit/neptune/new/internal/__init__.py b/tests/unit/neptune/new/internal/__init__.py
deleted file mode 100644
index 63b30720b..000000000
--- a/tests/unit/neptune/new/internal/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2020, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/internal/artifacts/__init__.py b/tests/unit/neptune/new/internal/artifacts/__init__.py
deleted file mode 100644
index d71b3273e..000000000
--- a/tests/unit/neptune/new/internal/artifacts/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/internal/artifacts/drivers/__init__.py b/tests/unit/neptune/new/internal/artifacts/drivers/__init__.py
deleted file mode 100644
index d71b3273e..000000000
--- a/tests/unit/neptune/new/internal/artifacts/drivers/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/tests/unit/neptune/new/internal/artifacts/drivers/test_local.py b/tests/unit/neptune/new/internal/artifacts/drivers/test_local.py
deleted file mode 100644
index 5db8e991e..000000000
--- a/tests/unit/neptune/new/internal/artifacts/drivers/test_local.py
+++ /dev/null
@@ -1,209 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import shutil
-import tempfile
-import unittest
-from pathlib import Path
-
-from neptune.exceptions import (
-    NeptuneLocalStorageAccessException,
-    NeptuneUnsupportedArtifactFunctionalityException,
-)
-from neptune.internal.artifacts.drivers.local import LocalArtifactDriver
-from neptune.internal.artifacts.types import (
-    ArtifactDriversMap,
-    ArtifactFileData,
-    ArtifactFileType,
-)
-from tests.unit.neptune.new.internal.artifacts.utils import md5
-
-
-class TestLocalArtifactDrivers(unittest.TestCase):
-    test_dir = None
-
-    def setUp(self):
-        self.test_sources_dir = Path(str(tempfile.mktemp()))
-        self.test_dir = Path(str(tempfile.mktemp()))
-        test_source_data = Path(__file__).parents[5] / "data" / "local_artifact_drivers_data"
-        test_data = self.test_dir / "data"
-
-        # copy source data to temp dir (to prevent e.g. inter-fs symlinks)
-        shutil.copytree(test_source_data, self.test_sources_dir)
-
-        # create files to track
-        shutil.copytree(self.test_sources_dir / "files_to_track", test_data)
-
-        # symbolic and hard link files
-        # `link_to` is new in python 3.8
-        # (test_source_data / 'file_to_link.txt').link_to(test_data / 'hardlinked_file.txt')
-        os.link(
-            src=str(self.test_sources_dir / "file_to_link.txt"),
-            dst=str(test_data / "hardlinked_file.txt"),
-        )
-        (test_data / "symlinked_file.txt").symlink_to(self.test_sources_dir / "file_to_link.txt")
-
-        # symlink dir - content of this file won't be discovered
-        (test_data / "symlinked_dir").symlink_to(self.test_sources_dir / "dir_to_link", target_is_directory=True)
-
-    def tearDown(self) -> None:
-        # clean tmp directories
-        shutil.rmtree(self.test_dir, ignore_errors=True)
-        shutil.rmtree(self.test_sources_dir, ignore_errors=True)
-
-    def test_match_by_path(self):
-        self.assertEqual(ArtifactDriversMap.match_path("file:///path/to/"), LocalArtifactDriver)
-        self.assertEqual(ArtifactDriversMap.match_path("/path/to/"), LocalArtifactDriver)
-
-    def test_match_by_type(self):
-        self.assertEqual(ArtifactDriversMap.match_type("Local"), LocalArtifactDriver)
-
-    def test_file_download(self):
-        path = (self.test_dir / "data/file1.txt").as_posix()
-        artifact_file = ArtifactFileData(
-            file_path="data/file1.txt",
-            file_hash="??",
-            type="??",
-            metadata={"file_path": f"file://{path}"},
-        )
-
-        with tempfile.TemporaryDirectory() as temporary:
-            downloaded_file = Path(temporary) / "downloaded_file.ext"
-
-            LocalArtifactDriver.download_file(destination=downloaded_file, file_definition=artifact_file)
-
-            self.assertTrue(Path(downloaded_file).is_symlink())
-            self.assertEqual("ad62f265e5b1a2dc51f531e44e748aa0", md5(downloaded_file))
-
-    def test_non_existing_file_download(self):
-        path = "/wrong/path"
-        artifact_file = ArtifactFileData(file_path=path, file_hash="??", type="??", metadata={"file_path": path})
-
-        with self.assertRaises(NeptuneLocalStorageAccessException), tempfile.TemporaryDirectory() as temporary:
-            local_destination = Path(temporary)
-            LocalArtifactDriver.download_file(destination=local_destination, file_definition=artifact_file)
-
-    def test_single_retrieval(self):
-        files = LocalArtifactDriver.get_tracked_files(str(self.test_dir / "data/file1.txt"))
-
-        self.assertEqual(1, len(files))
-        self.assertIsInstance(files[0], ArtifactFileData)
-        self.assertEqual(ArtifactFileType.LOCAL.value, files[0].type)
-        self.assertEqual("d2c24d65e1d3870f4cf2dbbd8c994b4977a7c384", files[0].file_hash)
-        self.assertEqual("file1.txt", files[0].file_path)
-        self.assertEqual(21, files[0].size)
-        self.assertEqual({"file_path", "last_modified"}, files[0].metadata.keys())
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/file1.txt').as_posix()}",
-            files[0].metadata["file_path"],
-        )
-        self.assertIsInstance(files[0].metadata["last_modified"], str)
-
-    def test_multiple_retrieval(self):
-        files = LocalArtifactDriver.get_tracked_files(str(self.test_dir / "data"))
-        files = sorted(files, key=lambda file: file.file_path)
-
-        self.assertEqual(4, len(files))
-
-        self.assertEqual("file1.txt", files[0].file_path)
-        self.assertEqual("d2c24d65e1d3870f4cf2dbbd8c994b4977a7c384", files[0].file_hash)
-        self.assertEqual(21, files[0].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/file1.txt').as_posix()}",
-            files[0].metadata["file_path"],
-        )
-
-        self.assertEqual("hardlinked_file.txt", files[1].file_path)
-        self.assertEqual("da3e6ddfa171e1ab5564609caa1dbbea9871886e", files[1].file_hash)
-        self.assertEqual(44, files[1].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/hardlinked_file.txt').as_posix()}",
-            files[1].metadata["file_path"],
-        )
-
-        self.assertEqual("sub_dir/file_in_subdir.txt", files[2].file_path)
-        self.assertEqual("98181b1a4c880a462fcfa96b92c84b8e945ac335", files[2].file_hash)
-        self.assertEqual(24, files[2].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/sub_dir/file_in_subdir.txt').as_posix()}",
-            files[2].metadata["file_path"],
-        )
-
-        self.assertEqual("symlinked_file.txt", files[3].file_path)
-        self.assertEqual("da3e6ddfa171e1ab5564609caa1dbbea9871886e", files[3].file_hash)
-        self.assertEqual(44, files[3].size)
-        self.assertEqual(
-            f"file://{(self.test_sources_dir.resolve() / 'file_to_link.txt').as_posix()}",
-            files[3].metadata["file_path"],
-        )
-
-    def test_multiple_retrieval_prefix(self):
-        files = LocalArtifactDriver.get_tracked_files((self.test_dir / "data").as_posix(), "my/custom_path")
-        files = sorted(files, key=lambda file: file.file_path)
-
-        self.assertEqual(4, len(files))
-
-        self.assertEqual("my/custom_path/file1.txt", files[0].file_path)
-        self.assertEqual("d2c24d65e1d3870f4cf2dbbd8c994b4977a7c384", files[0].file_hash)
-        self.assertEqual(21, files[0].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/file1.txt').as_posix()}",
-            files[0].metadata["file_path"],
-        )
-
-        self.assertEqual("my/custom_path/hardlinked_file.txt", files[1].file_path)
-        self.assertEqual("da3e6ddfa171e1ab5564609caa1dbbea9871886e", files[1].file_hash)
-        self.assertEqual(44, files[1].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/hardlinked_file.txt').as_posix()}",
-            files[1].metadata["file_path"],
-        )
-
-        self.assertEqual("my/custom_path/sub_dir/file_in_subdir.txt", files[2].file_path)
-        self.assertEqual("98181b1a4c880a462fcfa96b92c84b8e945ac335", files[2].file_hash)
-        self.assertEqual(24, files[2].size)
-        self.assertEqual(
-            f"file://{(self.test_dir.resolve() / 'data/sub_dir/file_in_subdir.txt').as_posix()}",
-            files[2].metadata["file_path"],
-        )
-
-        self.assertEqual("my/custom_path/symlinked_file.txt", files[3].file_path)
-        self.assertEqual("da3e6ddfa171e1ab5564609caa1dbbea9871886e", files[3].file_hash)
-        self.assertEqual(44, files[3].size)
-        self.assertEqual(
-            f"file://{(self.test_sources_dir.resolve() / 'file_to_link.txt').as_posix()}",
-            files[3].metadata["file_path"],
-        )
-
-    def test_expand_user(self):
-        os.environ["HOME"] = str(self.test_dir.resolve())
-
-        with open(pathlib.Path("~/tmp_test_expand_user").expanduser(), "w") as f:
-            f.write("File to test ~ resolution")
-
-        files = LocalArtifactDriver.get_tracked_files("~/tmp_test_expand_user")
-
-        self.assertEqual(1, len(files))
-        file = files[0]
-
-        self.assertEqual("tmp_test_expand_user", file.file_path)
-        self.assertEqual("eb596bf2f5fd0461d3d0b432f805b3984786c721", file.file_hash)
-        self.assertEqual(25, file.size)
-
-    def test_wildcards_not_supported(self):
-        with self.assertRaises(NeptuneUnsupportedArtifactFunctionalityException):
-            LocalArtifactDriver.get_tracked_files(str(self.test_dir / "data/*.txt"))
diff --git a/tests/unit/neptune/new/internal/artifacts/drivers/test_s3.py b/tests/unit/neptune/new/internal/artifacts/drivers/test_s3.py
deleted file mode 100644
index c1b1a12ed..000000000
--- a/tests/unit/neptune/new/internal/artifacts/drivers/test_s3.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import datetime
-import tempfile
-import unittest
-from pathlib import Path
-
-import freezegun
-import pytest
-
-from neptune.exceptions import NeptuneUnsupportedArtifactFunctionalityException
-from neptune.internal.artifacts.drivers.s3 import S3ArtifactDriver
-from neptune.internal.artifacts.types import (
-    ArtifactDriversMap,
-    ArtifactFileData,
-    ArtifactFileType,
-)
-from tests.unit.neptune.new.internal.artifacts.utils import md5
-
-mock_s3 = pytest.importorskip("moto.mock_s3")
-
-
-@pytest.mark.skip(reason="boto3 is not installed")
-@mock_s3
-class TestS3ArtifactDrivers(unittest.TestCase):
-    def setUp(self):
-        self.bucket_name = "kuiper_belt"
-        self.s3 = boto3.client("s3")  # noqa: F821
-        self.s3.create_bucket(Bucket=self.bucket_name)
-        self.update_time = datetime.datetime(2021, 5, 23, 3, 55, 26)
-        with freezegun.freeze_time(self.update_time):
-            self.s3.put_object(Bucket=self.bucket_name, Key="path/to/file1", Body=b"\xde\xad\xbe\xef")
-            self.s3.put_object(Bucket=self.bucket_name, Key="path/to/file2", Body=b"\x20")
-            self.s3.put_object(Bucket=self.bucket_name, Key="path/to/", Body=b"")
-            self.s3.put_object(Bucket=self.bucket_name, Key="path/file3", Body=b"\x21")
-
-    def test_match_by_path(self):
-        self.assertEqual(
-            ArtifactDriversMap.match_path(f"s3://{self.bucket_name}/path/to/"),
-            S3ArtifactDriver,
-        )
-
-    def test_match_by_type(self):
-        self.assertEqual(ArtifactDriversMap.match_type("S3"), S3ArtifactDriver)
-
-    def test_file_download(self):
-        artifact_file = ArtifactFileData(
-            file_path="to/file1",
-            file_hash="2f249230a8e7c2bf6005ccd2679259ec",
-            type=ArtifactFileType.S3.value,
-            metadata={"location": f"s3://{self.bucket_name}/path/to/file1"},
-        )
-
-        with tempfile.TemporaryDirectory() as temporary:
-            local_destination = Path(temporary) / "target.txt"
-
-            S3ArtifactDriver.download_file(destination=local_destination, file_definition=artifact_file)
-
-            self.assertEqual("2f249230a8e7c2bf6005ccd2679259ec", md5(local_destination))
-
-    def test_single_retrieval(self):
-        files = S3ArtifactDriver.get_tracked_files(f"s3://{self.bucket_name}/path/to/file1")
-
-        self.assertEqual(1, len(files))
-        self.assertIsInstance(files[0], ArtifactFileData)
-        self.assertEqual(ArtifactFileType.S3.value, files[0].type)
-        self.assertEqual("2f249230a8e7c2bf6005ccd2679259ec", files[0].file_hash)
-        self.assertEqual("file1", files[0].file_path)
-        self.assertEqual(4, files[0].size)
-        self.assertEqual({"location", "last_modified"}, files[0].metadata.keys())
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/file1", files[0].metadata["location"])
-        self.assertEqual(
-            self.update_time.strftime(S3ArtifactDriver.DATETIME_FORMAT),
-            files[0].metadata["last_modified"],
-        )
-
-    def test_multiple_retrieval(self):
-        files = S3ArtifactDriver.get_tracked_files(f"s3://{self.bucket_name}/path/to/")
-        files = sorted(files, key=lambda file: file.file_path)
-
-        self.assertEqual(3, len(files))
-
-        self.assertEqual("d41d8cd98f00b204e9800998ecf8427e", files[0].file_hash)
-        self.assertEqual("to", files[0].file_path)
-        self.assertEqual(0, files[0].size)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/", files[0].metadata["location"])
-
-        self.assertEqual("2f249230a8e7c2bf6005ccd2679259ec", files[1].file_hash)
-        self.assertEqual("to/file1", files[1].file_path)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/file1", files[1].metadata["location"])
-
-        self.assertEqual("7215ee9c7d9dc229d2921a40e899ec5f", files[2].file_hash)
-        self.assertEqual("to/file2", files[2].file_path)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/file2", files[2].metadata["location"])
-
-    def test_multiple_retrieval_prefix(self):
-        files = S3ArtifactDriver.get_tracked_files(f"s3://{self.bucket_name}/path/", "my/custom_path")
-        files = sorted(files, key=lambda file: file.file_path)
-
-        self.assertEqual(len(files), 4)
-
-        self.assertEqual("9033e0e305f247c0c3c80d0c7848c8b3", files[0].file_hash)
-        self.assertEqual("my/custom_path/file3", files[0].file_path)
-        self.assertEqual(f"s3://{self.bucket_name}/path/file3", files[0].metadata["location"])
-
-        self.assertEqual("d41d8cd98f00b204e9800998ecf8427e", files[1].file_hash)
-        self.assertEqual("my/custom_path/to", files[1].file_path)
-        self.assertEqual(0, files[1].size)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/", files[1].metadata["location"])
-
-        self.assertEqual("2f249230a8e7c2bf6005ccd2679259ec", files[2].file_hash)
-        self.assertEqual("my/custom_path/to/file1", files[2].file_path)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/file1", files[2].metadata["location"])
-
-        self.assertEqual("7215ee9c7d9dc229d2921a40e899ec5f", files[3].file_hash)
-        self.assertEqual("my/custom_path/to/file2", files[3].file_path)
-        self.assertEqual(f"s3://{self.bucket_name}/path/to/file2", files[3].metadata["location"])
-
-    def test_wildcards_not_supported(self):
-        with self.assertRaises(NeptuneUnsupportedArtifactFunctionalityException):
-            S3ArtifactDriver.get_tracked_files(f"s3://{self.bucket_name}/*/to/")
diff --git a/tests/unit/neptune/new/internal/artifacts/test_file_hasher.py b/tests/unit/neptune/new/internal/artifacts/test_file_hasher.py
deleted file mode 100644
index fac11a6e2..000000000
--- a/tests/unit/neptune/new/internal/artifacts/test_file_hasher.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#
-# Copyright (c) 2021, Neptune Labs Sp. z o.o.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import hashlib -import time -from pathlib import Path -from tempfile import TemporaryDirectory - -from mock import ( - Mock, - patch, -) - -from neptune.internal.artifacts.file_hasher import FileHasher -from neptune.internal.artifacts.types import ArtifactFileData - - -class TestFileHasher: - def test_artifact_hash(self): - # do not change this test case without coordinating with Artifact API's ArtifactHashComputer - artifacts = [ - ArtifactFileData( - file_path="to/file1", - file_hash="c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac", - type="S3", - size=5234, - metadata={ - "location": "s3://bucket/path/to/file1", - "last_modification": "2021-08-09 10:22:53", - }, - ), - ArtifactFileData( - file_path="from/file2", - file_hash="4347d0f8ba661234a8eadc005e2e1d1b646c9682", - type="S3", - metadata={ - "location": "s3://bucket/path/to/file2", - "last_modification": "2021-08-09 10:32:12", - }, - ), - ] - - expected_hash = "56e64245b1d4915ff27b306c8077cd4f9ce1b31233c690a93ebc38a1b737a9ea" - assert expected_hash == FileHasher.get_artifact_hash(artifacts) - assert expected_hash == FileHasher.get_artifact_hash(reversed(artifacts)) - - def test_artifact_hash_without_metadata(self): - # do not change this test case without coordinating with Artifact API's ArtifactHashComputer - artifacts = [ - ArtifactFileData( - file_path="to/file1", - file_hash="c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac", - type="S3", - size=5234, - metadata={ - "location": "s3://bucket/path/to/file1", - "last_modification": "2021-08-09 10:22:53", - }, - ), - ArtifactFileData( - file_path="from/file2", - file_hash="4347d0f8ba661234a8eadc005e2e1d1b646c9682", - type="S3", - metadata={ - "location": "s3://bucket/path/to/file2", - "last_modification": "2021-08-09 10:32:12", - }, - ), - ] - - expected_hash = "e6d96bccc12db43acc6e24e2e79052ecaee52307470e44f93d74ecfebc119128" - assert expected_hash == FileHasher.get_artifact_hash_without_metadata(artifacts) - assert expected_hash == FileHasher.get_artifact_hash_without_metadata(reversed(artifacts)) - - @patch("pathlib.Path.home") - def test_local_file_hash(self, home): - with TemporaryDirectory() as tmp_dir: - with open(f"{tmp_dir}/test", "wb") as handler: - handler.write(b"\xde\xad\xbe\xef") - - home.return_value = Path(tmp_dir) - - assert "d78f8bb992a56a597f6c7a1fb918bb78271367eb" == FileHasher.get_local_file_hash(f"{tmp_dir}/test") - - @patch("pathlib.Path.home") - def test_local_file_hashed_only_once(self, home): - with TemporaryDirectory() as tmp_dir: - with open(f"{tmp_dir}/test", "wb") as handler: - handler.write(b"\xde\xad\xbe\xef") - - home.return_value = Path(tmp_dir) - hashlib.sha1 = Mock(side_effect=hashlib.sha1) - - hash1 = FileHasher.get_local_file_hash(f"{tmp_dir}/test") - hash2 = FileHasher.get_local_file_hash(f"{tmp_dir}/test") - - assert "d78f8bb992a56a597f6c7a1fb918bb78271367eb" == hash1 - assert "d78f8bb992a56a597f6c7a1fb918bb78271367eb" == hash2 - assert 1 == hashlib.sha1.call_count - - @patch("pathlib.Path.home") - def test_local_file_hashed_update(self, home): - with TemporaryDirectory() as tmp_dir: - with open(f"{tmp_dir}/test", "wb") as handler: - 
handler.write(b"\xde\xad\xbe\xef") - - home.return_value = Path(tmp_dir) - hashlib.sha1 = Mock(side_effect=hashlib.sha1) - - hash1 = FileHasher.get_local_file_hash(f"{tmp_dir}/test") - - # Minimal change in modification time - time.sleep(0.1) - - with open(f"{tmp_dir}/test", "wb") as handler: - handler.write(b"\x01\x02\x03\x04") - - hash2 = FileHasher.get_local_file_hash(f"{tmp_dir}/test") - - assert "d78f8bb992a56a597f6c7a1fb918bb78271367eb" == hash1 - assert "12dada1fff4d4787ade3333147202c3b443e376f" == hash2 - assert 2 == hashlib.sha1.call_count diff --git a/tests/unit/neptune/new/internal/artifacts/test_local_file_hash_storage.py b/tests/unit/neptune/new/internal/artifacts/test_local_file_hash_storage.py deleted file mode 100644 index 07c974aad..000000000 --- a/tests/unit/neptune/new/internal/artifacts/test_local_file_hash_storage.py +++ /dev/null @@ -1,73 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime -import tempfile -import unittest -from pathlib import Path - -from mock import patch - -from neptune.internal.artifacts.local_file_hash_storage import LocalFileHashStorage - - -class TestLocalFileHashStorage(unittest.TestCase): - @patch("pathlib.Path.home") - def setUp(self, home) -> None: - self.tempDir = tempfile.TemporaryDirectory() - home.return_value = Path(self.tempDir.name) - - self.sut = LocalFileHashStorage() - self.sut.insert( - Path(f"{self.tempDir.name}/test.file"), - "c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac", - datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f"), - ) - - def test_fetch_presented(self): - returned = self.sut.fetch_one(Path(f"{self.tempDir.name}/test.file")) - - self.assertEqual(returned.file_hash, "c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac") - - def test_fetch_not_presented(self): - returned = self.sut.fetch_one(Path(f"{self.tempDir.name}/test1.file")) - - self.assertIsNone(returned) - - def test_update_for_not_presented_suppressed(self): - self.sut.update( - Path(f"{self.tempDir.name}/test1.file"), - "c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac", - datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f"), - ) - - def test_update_for_presented(self): - new_datetime = datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f") - - self.sut.update(Path(f"{self.tempDir.name}/test.file"), "new_test_hash", new_datetime) - returned = self.sut.fetch_one(Path(f"{self.tempDir.name}/test.file")) - - self.assertEqual(returned.file_hash, "new_test_hash") - self.assertEqual(returned.modification_date, new_datetime) - - def test_insert(self): - modification_date = datetime.datetime.now().strftime("%Y%m%d_%H%M%S%f") - - self.sut.insert(Path(f"{self.tempDir.name}/test23.file"), "test_hash", modification_date) - returned = self.sut.fetch_one(Path(f"{self.tempDir.name}/test23.file")) - - self.assertEqual(returned.file_hash, "test_hash") - self.assertEqual(returned.modification_date, modification_date) diff --git a/tests/unit/neptune/new/internal/artifacts/test_serializer.py 
b/tests/unit/neptune/new/internal/artifacts/test_serializer.py deleted file mode 100644 index f634e609a..000000000 --- a/tests/unit/neptune/new/internal/artifacts/test_serializer.py +++ /dev/null @@ -1,42 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import unittest - -from neptune.internal.artifacts.types import ArtifactMetadataSerializer - - -class TestArtifactMetadataSerializer(unittest.TestCase): - def test_simple(self): - metadata = { - "location": "s3://bucket/path/to/file", - "last_modification": "2021-08-09 09:41:53", - "file_size": "18", - } - - serialized = ArtifactMetadataSerializer.serialize(metadata) - - self.assertListEqual( - [ - {"key": "file_size", "value": "18"}, - {"key": "last_modification", "value": "2021-08-09 09:41:53"}, - {"key": "location", "value": "s3://bucket/path/to/file"}, - ], - serialized, - ) - - deserialized = ArtifactMetadataSerializer.deserialize(serialized) - - self.assertDictEqual(metadata, deserialized) diff --git a/tests/unit/neptune/new/internal/artifacts/test_types.py b/tests/unit/neptune/new/internal/artifacts/test_types.py deleted file mode 100644 index cba4f9998..000000000 --- a/tests/unit/neptune/new/internal/artifacts/test_types.py +++ /dev/null @@ -1,99 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import pathlib -import unittest -from urllib.parse import urlparse - -from neptune.exceptions import ( - NeptuneUnhandledArtifactSchemeException, - NeptuneUnhandledArtifactTypeException, -) -from neptune.internal.artifacts.types import ( - ArtifactDriver, - ArtifactDriversMap, - ArtifactFileData, -) - - -class TestArtifactDriversMap(unittest.TestCase): - def setUp(self): - self._impl_backup = ArtifactDriversMap._implementations - ArtifactDriversMap._implementations = [] - - class TestArtifactDriver(ArtifactDriver): - @staticmethod - def get_type(): - return "test" - - @classmethod - def matches(cls, path: str) -> bool: - return urlparse(path).scheme == "test" - - @classmethod - def get_tracked_files(cls, path, destination=None): - return [] - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - pass - - self.test_driver_instance = TestArtifactDriver - ArtifactDriversMap._implementations = [self.test_driver_instance] - - def tearDown(self): - ArtifactDriversMap._implementations = self._impl_backup - - def test_driver_autoregister(self): - class PkArtifactDriver(ArtifactDriver): - @staticmethod - def get_type() -> str: - return "PK" - - @classmethod - def matches(cls, path: str) -> bool: - return urlparse(path).scheme == "pk" - - @classmethod - def get_tracked_files(cls, path, destination=None): - return [] - - @classmethod - def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileData): - pass - - self.assertIn( - PkArtifactDriver, - ArtifactDriversMap._implementations, - "PkArtifactDriver not registered with subclass logic", - ) - - def test_match_by_path(self): - driver_instance = ArtifactDriversMap.match_path("test://path/to/file") - - self.assertEqual(driver_instance, self.test_driver_instance) - - def test_unmatched_path_raises_exception(self): - with self.assertRaises(NeptuneUnhandledArtifactSchemeException): - ArtifactDriversMap.match_path("test2://path/to/file") - - def test_match_by_type(self): - driver_instance = ArtifactDriversMap.match_type("test") - - self.assertEqual(driver_instance, self.test_driver_instance) - - def test_unmatched_type_raises_exception(self): - with self.assertRaises(NeptuneUnhandledArtifactTypeException): - ArtifactDriversMap.match_type("test2") diff --git a/tests/unit/neptune/new/internal/artifacts/utils.py b/tests/unit/neptune/new/internal/artifacts/utils.py deleted file mode 100644 index d0f6ee200..000000000 --- a/tests/unit/neptune/new/internal/artifacts/utils.py +++ /dev/null @@ -1,29 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import hashlib - -CHUNK_SIZE = 4096 - - -# from https://stackoverflow.com/a/3431838 -def md5(fname): - hash_md5 = hashlib.md5() - - with open(fname, "rb") as f: - for chunk in iter(lambda: f.read(CHUNK_SIZE), b""): - hash_md5.update(chunk) - - return hash_md5.hexdigest() diff --git a/tests/unit/neptune/new/internal/backends/__init__.py b/tests/unit/neptune/new/internal/backends/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/internal/backends/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/internal/backends/test_hosted_artifact_operations.py b/tests/unit/neptune/new/internal/backends/test_hosted_artifact_operations.py deleted file mode 100644 index af85bcdb0..000000000 --- a/tests/unit/neptune/new/internal/backends/test_hosted_artifact_operations.py +++ /dev/null @@ -1,196 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import unittest -import uuid - -from mock import ( - MagicMock, - patch, -) - -from neptune.exceptions import ArtifactUploadingError -from neptune.internal.artifacts.types import ArtifactFileData -from neptune.internal.backends.api_model import ArtifactModel -from neptune.internal.backends.hosted_artifact_operations import ( - track_to_existing_artifact, - track_to_new_artifact, -) - - -class TestHostedArtifactOperations(unittest.TestCase): - def setUp(self) -> None: - self.artifact_hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" - self.emptyDirectoryFile = ArtifactFileData("to", "c38444d2ccff1a7aab3d323fb6234e1b4f0a81ac", "S3", {}, 0) - self.files = [ - ArtifactFileData( - "fname.txt", - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "test", - {}, - ), - self.emptyDirectoryFile, - ] - self.project_id = str(uuid.uuid4()) - self.parent_identifier = str(uuid.uuid4()) - - @patch("neptune.internal.backends.hosted_artifact_operations._compute_artifact_hash") - @patch("neptune.internal.backends.hosted_artifact_operations._extract_file_list") - @patch("neptune.internal.backends.hosted_artifact_operations.create_new_artifact") - def test_track_to_new_artifact_calls_creation( - self, create_new_artifact, _extract_file_list, _compute_artifact_hash - ): - # given - swagger_mock = self._get_swagger_mock() - _compute_artifact_hash.return_value = self.artifact_hash - _extract_file_list.return_value = self.files - - # when - track_to_new_artifact( - swagger_client=swagger_mock, - project_id=self.project_id, - path=["sub", "one"], - parent_identifier=self.parent_identifier, - entries=[("/path/to/file", "/path/to")], - default_request_params={}, - exclude_directory_files=False, - exclude_metadata_from_hash=False, - ) - - # then - create_new_artifact.assert_called_once_with( - swagger_client=swagger_mock, - project_id=self.project_id, - artifact_hash=self.artifact_hash, - parent_identifier=self.parent_identifier, - size=None, - default_request_params={}, - ) - - @patch("neptune.internal.backends.hosted_artifact_operations._compute_artifact_hash_without_metadata") - @patch("neptune.internal.backends.hosted_artifact_operations._extract_file_list") - @patch("neptune.internal.backends.hosted_artifact_operations.create_new_artifact") - @patch("neptune.internal.backends.hosted_artifact_operations.upload_artifact_files_metadata") - def test_track_to_new_artifact_calls_upload( - self, - upload_artifact_files_metadata, - create_new_artifact, - _extract_file_list, - _compute_artifact_hash_without_metadata, - ): - # given - swagger_mock = self._get_swagger_mock() - _compute_artifact_hash_without_metadata.return_value = self.artifact_hash - _extract_file_list.return_value = self.files - create_new_artifact.return_value = ArtifactModel( - received_metadata=False, hash=self.artifact_hash, size=len(self.files) - ) - - # when - track_to_new_artifact( - swagger_client=swagger_mock, - project_id=self.project_id, - path=["sub", "one"], - parent_identifier=self.parent_identifier, - entries=[("/path/to/file", "/path/to")], - default_request_params={}, - exclude_directory_files=True, - exclude_metadata_from_hash=True, - ) - - # then - upload_artifact_files_metadata.assert_called_once_with( - swagger_client=swagger_mock, - project_id=self.project_id, - artifact_hash=self.artifact_hash, - files=[self.files[0]], - default_request_params={}, - ) - - @patch("neptune.internal.backends.hosted_artifact_operations._extract_file_list") - def 
test_track_to_new_artifact_raises_exception(self, _extract_file_list): - # given - swagger_mock = self._get_swagger_mock() - _extract_file_list.return_value = [self.emptyDirectoryFile] - - # when - with self.assertRaises(ArtifactUploadingError): - track_to_new_artifact( - swagger_client=swagger_mock, - project_id=self.project_id, - path=["sub", "one"], - parent_identifier=self.parent_identifier, - entries=[("/path/to/file", "/path/to")], - default_request_params={}, - exclude_directory_files=True, - exclude_metadata_from_hash=False, - ) - - @patch("neptune.internal.backends.hosted_artifact_operations._extract_file_list") - @patch("neptune.internal.backends.hosted_artifact_operations.create_artifact_version") - def test_track_to_existing_artifact_calls_version(self, create_artifact_version, _extract_file_list): - # given - swagger_mock = self._get_swagger_mock() - _extract_file_list.return_value = self.files - - # when - track_to_existing_artifact( - swagger_client=swagger_mock, - project_id=self.project_id, - path=["sub", "one"], - artifact_hash=self.artifact_hash, - parent_identifier=self.parent_identifier, - entries=[("/path/to/file", "/path/to")], - default_request_params={}, - exclude_directory_files=False, - ) - - # then - create_artifact_version.assert_called_once_with( - swagger_client=swagger_mock, - project_id=self.project_id, - artifact_hash=self.artifact_hash, - parent_identifier=self.parent_identifier, - files=self.files, - default_request_params={}, - ) - - @patch("neptune.internal.backends.hosted_artifact_operations._extract_file_list") - def test_track_to_existing_artifact_raises_exception(self, _extract_file_list): - # given - swagger_mock = self._get_swagger_mock() - _extract_file_list.return_value = [self.emptyDirectoryFile] - - # when - with self.assertRaises(ArtifactUploadingError): - track_to_existing_artifact( - swagger_client=swagger_mock, - project_id=self.project_id, - path=["sub", "one"], - artifact_hash="abcdef", - parent_identifier=self.parent_identifier, - entries=[("/path/to/file", "/path/to")], - default_request_params={}, - exclude_directory_files=True, - ) - - @staticmethod - def _get_swagger_mock(): - swagger_mock = MagicMock() - swagger_mock.swagger_spec.http_client = MagicMock() - swagger_mock.swagger_spec.api_url = "ui.neptune.ai" - swagger_mock.api.createNewArtifact.operation.path_name = "/createNewArtifact" - swagger_mock.api.uploadArtifactFilesMetadata.operation.path_name = "/uploadArtifactFilesMetadata" - return swagger_mock diff --git a/tests/unit/neptune/new/internal/backends/test_hosted_client.py b/tests/unit/neptune/new/internal/backends/test_hosted_client.py deleted file mode 100644 index ba77f1e72..000000000 --- a/tests/unit/neptune/new/internal/backends/test_hosted_client.py +++ /dev/null @@ -1,568 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import unittest -import uuid -from dataclasses import dataclass - -import pytest -from bravado.exception import ( - HTTPBadRequest, - HTTPConflict, - HTTPForbidden, - HTTPNotFound, - HTTPUnprocessableEntity, -) -from bravado.testing.response_mocks import BravadoResponseMock -from mock import ( - MagicMock, - Mock, - patch, -) - -from neptune.api.models import FieldType -from neptune.internal.backends.hosted_client import ( - DEFAULT_REQUEST_KWARGS, - _get_token_client, - create_artifacts_client, - create_backend_client, - create_http_client_with_auth, - create_leaderboard_client, - get_client_config, -) -from neptune.internal.backends.hosted_neptune_backend import _get_column_type_from_entries -from neptune.internal.backends.utils import verify_host_resolution -from neptune.management import ( - MemberRole, - add_project_member, - create_project, - delete_project, - get_project_list, - get_project_member_list, - get_workspace_member_list, - invite_to_workspace, - remove_project_member, -) -from neptune.management.exceptions import ( - AccessRevokedOnDeletion, - AccessRevokedOnMemberRemoval, - ProjectAlreadyExists, - ProjectNotFound, - ProjectPrivacyRestrictedException, - ProjectsLimitReached, - UnsupportedValue, - UserAlreadyHasAccess, - UserNotExistsOrWithoutAccess, - WorkspaceNotFound, -) -from tests.unit.neptune.backend_test_mixin import BackendTestMixin -from tests.unit.neptune.new.utils import response_mock - -API_TOKEN = ( - "eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vYXBwLnN0YWdlLm5lcHR1bmUubWwiLCJ" - "hcGlfa2V5IjoiOTJhNzhiOWQtZTc3Ni00ODlhLWI5YzEtNzRkYmI1ZGVkMzAyIn0=" -) - - -@patch("neptune.internal.backends.hosted_client.RequestsClient", new=MagicMock()) -@patch("neptune.internal.backends.hosted_client.NeptuneAuthenticator", new=MagicMock()) -@patch("bravado.client.SwaggerClient.from_url") -@patch("platform.platform", new=lambda: "testPlatform") -@patch("platform.python_version", new=lambda: "3.9.test") -class TestHostedClient(unittest.TestCase, BackendTestMixin): - def setUp(self) -> None: - # Clear all LRU storage - verify_host_resolution.cache_clear() - _get_token_client.cache_clear() - get_client_config.cache_clear() - create_http_client_with_auth.cache_clear() - create_backend_client.cache_clear() - create_leaderboard_client.cache_clear() - create_artifacts_client.cache_clear() - - def test_project_listing_empty(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - swagger_client.api.listProjects.return_value.response = BravadoResponseMock(result=Mock(entries=[])) - - # when: - returned_projects = get_project_list(api_token=API_TOKEN) - - # then: - self.assertEqual([], returned_projects) - - def test_project_listing(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - project1 = MagicMock(organizationName="org1") - project1.name = "project1" - project2 = MagicMock(organizationName="org2") - project2.name = "project2" - projects = Mock(entries=[project1, project2]) - swagger_client.api.listProjects.return_value.response = BravadoResponseMock( - result=projects, - ) - - # when: - returned_projects = get_project_list(api_token=API_TOKEN) - - # then: - self.assertEqual(["org1/project1", "org2/project2"], returned_projects) - - def test_invite_to_workspace(self, swagger_client_factory): - # given: - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - invite_to_workspace( - username="tester1", - workspace="org2", - 
api_token=API_TOKEN, - ) - - # then: - swagger_client.api.createOrganizationInvitations.assert_called_once_with( - newOrganizationInvitations={ - "invitationsEntries": [ - {"invitee": "tester1", "invitationType": "user", "roleGrant": "member", "addToAllProjects": False} - ], - "organizationIdentifier": "org2", - }, - **DEFAULT_REQUEST_KWARGS, - ) - - def test_invite_to_workspace_username_email_raises(self, swagger_client_factory): - - # neither specified - self.assertRaises(ValueError, invite_to_workspace, workspace="org2", api_token=API_TOKEN) - - # both specified - self.assertRaises( - ValueError, - invite_to_workspace, - workspace="org2", - api_token=API_TOKEN, - username="user", - email="email@email.com", - ) - - def test_invite_to_workspace_invalid_role_raises(self, swagger_client_factory): - self.assertRaises( - ValueError, - invite_to_workspace, - workspace="org2", - username="user", - api_token=API_TOKEN, - role="non-existent-role", - ) - self.assertRaises( - ValueError, invite_to_workspace, workspace="org2", username="user", api_token=API_TOKEN, role="owner" - ) - - def test_workspace_members(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - members = [ - Mock(role="member", registeredMemberInfo=Mock(username="tester1")), - Mock(role="owner", registeredMemberInfo=Mock(username="tester2")), - ] - swagger_client.api.listOrganizationMembers.return_value.response = BravadoResponseMock( - result=members, - ) - - # when: - returned_members = get_workspace_member_list(workspace="org2", api_token=API_TOKEN) - - # then: - self.assertEqual({"tester1": "member", "tester2": "admin"}, returned_members) - - def test_workspace_members_empty(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - members = [] - swagger_client.api.listOrganizationMembers.return_value.response = BravadoResponseMock( - result=members, - ) - - # when: - returned_members = get_workspace_member_list(workspace="org2", api_token=API_TOKEN) - - # then: - self.assertEqual({}, returned_members) - - def test_workspace_members_not_exists(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.listOrganizationMembers.side_effect = HTTPNotFound(response=response_mock()) - - # then: - with self.assertRaises(WorkspaceNotFound): - get_workspace_member_list(workspace="org2", api_token=API_TOKEN) - - def test_project_members(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - members = [ - Mock(role="member", registeredMemberInfo=Mock(username="tester1")), - Mock(role="manager", registeredMemberInfo=Mock(username="tester2")), - Mock(role="viewer", registeredMemberInfo=Mock(username="tester3")), - ] - swagger_client.api.listProjectMembers.return_value.response = BravadoResponseMock( - result=members, - ) - - # when: - returned_members = get_project_member_list(project="org/proj", api_token=API_TOKEN) - - # then: - self.assertEqual( - {"tester1": "contributor", "tester2": "owner", "tester3": "viewer"}, - returned_members, - ) - - def test_project_members_empty(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - members = [] - swagger_client.api.listProjectMembers.return_value.response = BravadoResponseMock( - result=members, - ) - - # when: - returned_members = 
get_project_member_list(project="org/proj", api_token=API_TOKEN) - - # then: - self.assertEqual({}, returned_members) - - def test_project_members_not_exists(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.listProjectMembers.side_effect = HTTPNotFound(response=response_mock()) - - # then: - with self.assertRaises(ProjectNotFound): - get_project_member_list(project="org/proj", api_token=API_TOKEN) - - def test_delete_project_not_exists(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.deleteProject.side_effect = HTTPNotFound(response=response_mock()) - - # then: - with self.assertRaises(ProjectNotFound): - delete_project(project="org/proj", api_token=API_TOKEN) - - def test_delete_project_permissions(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.deleteProject.side_effect = HTTPForbidden(response=response_mock()) - - # then: - with self.assertRaises(AccessRevokedOnDeletion): - delete_project(project="org/proj", api_token=API_TOKEN) - - def test_create_project_already_exists(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - swagger_client.api.createProject.side_effect = HTTPBadRequest( - response=response_mock(), - swagger_result=MagicMock( - code=None, - errorType={"name": "validationError"}, - message=None, - title="Validation Errors", - type=None, - validationErrors=[ - { - "path": ["name"], - "errors": [{"errorCode": {"name": "ERR_NOT_UNIQUE"}}], - } - ], - ), - ) - - # then: - with self.assertRaises(ProjectAlreadyExists): - create_project(name="org/proj", key="PRJ", api_token=API_TOKEN) - - def test_create_project_unknown_visibility(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - - with self.assertRaises(UnsupportedValue): - create_project( - name="org/proj", - key="PRJ", - visibility="unknown_value", - api_token=API_TOKEN, - ) - - def test_create_project_no_workspace(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - - # then: - with self.assertRaises(WorkspaceNotFound): - create_project(name="not_an_org/proj", key="PRJ", api_token=API_TOKEN) - - def test_create_project_limit_reached(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - response = 
response_mock() - response.json.return_value = { - "errorCode": 422, - "errorType": "LIMIT_OF_PROJECTS_REACHED", - "message": "Maximum number of projects (1000) reached", - } - swagger_client.api.createProject.side_effect = HTTPUnprocessableEntity( - response=response, - ) - - # then: - with self.assertRaises(ProjectsLimitReached): - create_project(name="org/proj", key="PRJ", api_token=API_TOKEN) - - def test_create_project_private_not_allowed(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - response = response_mock() - response.json.return_value = { - "errorType": "VISIBILITY_RESTRICTED", - "message": "Cannot set visibility priv for project. You are limited to: pub, workspace", - "requestedValue": "priv", - "allowedValues": ["pub", "workspace"], - } - swagger_client.api.createProject.side_effect = HTTPUnprocessableEntity( - response=response, - ) - - # then: - with self.assertRaisesRegex(ProjectPrivacyRestrictedException, '.*"priv" visibility.*'): - create_project(name="org/proj", key="PRJ", visibility="priv", api_token=API_TOKEN) - - def test_create_project_private_not_allowed_no_details(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - response = response_mock() - response.json.return_value = { - "errorType": "VISIBILITY_RESTRICTED", - "message": "Cannot set visibility priv for project. 
You are limited to: pub, workspace", - } - swagger_client.api.createProject.side_effect = HTTPUnprocessableEntity( - response=response, - ) - - # then: - with self.assertRaisesRegex(ProjectPrivacyRestrictedException, ".*selected visibility.*"): - create_project(name="org/proj", key="PRJ", visibility="priv", api_token=API_TOKEN) - - def test_create_project(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # given: - organization = Mock(id=str(uuid.uuid4())) - organization.name = "org" - organizations = [organization] - - # and: - project = Mock(organizationName="org") - project.name = "proj" - - # when: - swagger_client.api.listOrganizations.return_value.response = BravadoResponseMock( - result=organizations, - ) - swagger_client.api.createProject.return_value.response = BravadoResponseMock( - result=project, - ) - - # then: - self.assertEqual("org/proj", create_project(name="org/proj", key="PRJ", api_token=API_TOKEN)) - - def test_add_project_member_project_not_found(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.addProjectMember.side_effect = HTTPNotFound( - response=response_mock(), - ) - - # then: - with self.assertRaises(ProjectNotFound): - add_project_member( - project="org/proj", - username="tester", - role=MemberRole.VIEWER, - api_token=API_TOKEN, - ) - - def test_add_project_member_unknown_role(self, swagger_client_factory): - _ = self._get_swagger_client_mock(swagger_client_factory) - - # then: - with self.assertRaises(UnsupportedValue): - add_project_member( - project="org/proj", - username="tester", - role="unknown_role", - api_token=API_TOKEN, - ) - - def test_add_project_member_member_without_access(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.addProjectMember.side_effect = HTTPConflict(response=response_mock()) - - # then: - with self.assertRaises(UserAlreadyHasAccess): - add_project_member( - project="org/proj", - username="tester", - role=MemberRole.VIEWER, - api_token=API_TOKEN, - ) - - def test_remove_project_member_project_not_found(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.deleteProjectMember.side_effect = HTTPNotFound( - response=response_mock(), - ) - - # then: - with self.assertRaises(ProjectNotFound): - remove_project_member(project="org/proj", username="tester", api_token=API_TOKEN) - - def test_remove_project_member_no_user(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.deleteProjectMember.side_effect = HTTPUnprocessableEntity( - response=response_mock(), - ) - - # then: - with self.assertRaises(UserNotExistsOrWithoutAccess): - remove_project_member(project="org/proj", username="tester", api_token=API_TOKEN) - - def test_remove_project_member_permissions(self, swagger_client_factory): - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - - # when: - swagger_client.api.deleteProjectMember.side_effect = HTTPForbidden( - response=response_mock(), - ) - - # then: - with self.assertRaises(AccessRevokedOnMemberRemoval): - remove_project_member(project="org/proj", username="tester", api_token=API_TOKEN) - - -def test__get_column_type_from_entries(): - @dataclass - class DTO: - type: str - name: str = "test_column" - - # when - 
test_cases = [ - {"entries": [], "exc": ValueError}, - {"entries": [DTO(type="float")], "result": FieldType.FLOAT.value}, - {"entries": [DTO(type="string")], "result": FieldType.STRING.value}, - {"entries": [DTO(type="float"), DTO(type="floatSeries")], "exc": ValueError}, - {"entries": [DTO(type="float"), DTO(type="int")], "result": FieldType.FLOAT.value}, - {"entries": [DTO(type="float"), DTO(type="int"), DTO(type="datetime")], "result": FieldType.STRING.value}, - {"entries": [DTO(type="float"), DTO(type="int"), DTO(type="string")], "result": FieldType.STRING.value}, - { - "entries": [DTO(type="float"), DTO(type="int"), DTO(type="string", name="test_column_different")], - "result": FieldType.FLOAT.value, - }, - ] - - # then - for tc in test_cases: - exc = tc.get("exc", None) - if exc is not None: - with pytest.raises(exc): - _get_column_type_from_entries(tc["entries"], column="test_column") - else: - result = _get_column_type_from_entries(tc["entries"], column="test_column") - assert result == tc["result"] diff --git a/tests/unit/neptune/new/internal/backends/test_hosted_file_operations.py b/tests/unit/neptune/new/internal/backends/test_hosted_file_operations.py deleted file mode 100644 index c28aba1ae..000000000 --- a/tests/unit/neptune/new/internal/backends/test_hosted_file_operations.py +++ /dev/null @@ -1,510 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import os -import random -import unittest -import uuid -from collections import namedtuple -from tempfile import ( - NamedTemporaryFile, - TemporaryDirectory, -) - -import mock -from mock import ( - MagicMock, - call, - patch, -) - -from neptune.internal.backends.api_model import ClientConfig -from neptune.internal.backends.hosted_file_operations import ( - _get_content_disposition_filename, - download_file_attribute, - download_file_set_attribute, - upload_file_attribute, - upload_file_set_attribute, -) -from neptune.internal.utils.utils import IS_WINDOWS -from tests.unit.neptune.backend_test_mixin import BackendTestMixin -from tests.unit.neptune.new.utils.file_helpers import create_file - - -def set_expected_result(endpoint: MagicMock, value: dict): - endpoint.return_value.response.return_value.result = namedtuple(endpoint.__class__.__name__, value.keys())(**value) - - -class HostedFileOperationsHelper(unittest.TestCase): - @staticmethod - def get_random_bytes(count): - return bytes(random.randint(0, 255) for _ in range(count)) - - @staticmethod - def _get_swagger_mock(): - swagger_mock = MagicMock() - swagger_mock.swagger_spec.http_client = MagicMock() - swagger_mock.swagger_spec.api_url = "ui.neptune.ai" - swagger_mock.api.uploadFileSetAttributeChunk.operation.path_name = "/uploadFileSetChunk" - swagger_mock.api.uploadFileSetAttributeTar.operation.path_name = "/uploadFileSetTar" - swagger_mock.api.uploadPath.operation.path_name = "/uploadPath" - swagger_mock.api.uploadAttribute.operation.path_name = "/attributes/upload" - swagger_mock.api.downloadAttribute.operation.path_name = "/attributes/download" - swagger_mock.api.downloadFileSetAttributeZip.operation.path_name = "/attributes/downloadFileSetZip" - swagger_mock.api.download.operation.path_name = "/download" - - swagger_mock.api.fileAtomMultipartUploadStart.operation.path_name = "/attributes/storage/file/upload/start" - swagger_mock.api.fileAtomMultipartUploadFinish.operation.path_name = "/attributes/storage/file/upload/finish" - swagger_mock.api.fileAtomMultipartUploadPart.operation.path_name = "/attributes/storage/file/upload/part" - swagger_mock.api.fileAtomUpload.operation.path_name = "/attributes/storage/file/upload" - - swagger_mock.api.fileSetFileMultipartUploadStart.operation.path_name = ( - "/attributes/storage/fileset/upload/start" - ) - swagger_mock.api.fileSetFileMultipartUploadFinish.operation.path_name = ( - "/attributes/storage/fileset/upload/finish" - ) - swagger_mock.api.fileSetFileMultipartUploadPart.operation.path_name = "/attributes/storage/fileset/upload/part" - swagger_mock.api.fileSetFileUpload.operation.path_name = "/attributes/storage/fileset/upload" - return swagger_mock - - -class TestCommonHostedFileOperations(HostedFileOperationsHelper): - def test_get_content_disposition_filename(self): - # given - response_mock = MagicMock() - response_mock.headers = {"Content-Disposition": 'attachment; filename="sample.file"'} - - # when - filename = _get_content_disposition_filename(response_mock) - - # then - self.assertEqual(filename, "sample.file") - - @patch("neptune.internal.backends.hosted_file_operations._store_response_as_file") - @patch("neptune.internal.backends.hosted_file_operations._download_raw_data") - def test_download_file_attribute(self, download_raw, store_response_mock): - # given - swagger_mock = self._get_swagger_mock() - exp_uuid = str(uuid.uuid4()) - - # when - 
download_file_attribute( - swagger_client=swagger_mock, - container_id=exp_uuid, - attribute="some/attribute", - destination=None, - ) - - # then - download_raw.assert_called_once_with( - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/download", - headers={"Accept": "application/octet-stream"}, - query_params={"experimentId": str(exp_uuid), "attribute": "some/attribute"}, - ) - store_response_mock.assert_called_once_with( - download_raw.return_value, - None, - None, - ) - - @patch("neptune.internal.backends.hosted_file_operations._store_response_as_file") - @patch("neptune.internal.backends.hosted_file_operations._download_raw_data") - @patch( - "neptune.internal.backends.hosted_file_operations._get_download_url", - new=lambda _, _id: "some_url", - ) - def test_download_file_set_attribute(self, download_raw, store_response_mock): - # given - swagger_mock = self._get_swagger_mock() - download_id = str(uuid.uuid4()) - - # when - download_file_set_attribute( - swagger_client=swagger_mock, - download_id=download_id, - destination=None, - ) - - # then - download_raw.assert_called_once_with( - http_client=swagger_mock.swagger_spec.http_client, - url="some_url", - headers={"Accept": "application/zip"}, - ) - store_response_mock.assert_called_once_with( - download_raw.return_value, - None, - None, - ) - - -class TestNewUploadFileOperations(HostedFileOperationsHelper, BackendTestMixin): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - config_swagger_client = self._get_swagger_client_mock(MagicMock()) - client_config = ClientConfig.from_api_response(config_swagger_client.api.getClientConfig().response().result) - self.multipart_config = client_config.multipart_config - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - def test_missing_files_or_directory(self, upload_raw_data_mock): - # given - exp_uuid = str(uuid.uuid4()) - swagger_mock = self._get_swagger_mock() - upload_raw_data_mock.return_value = b"null" - - # when - with NamedTemporaryFile("w") as temp_file_1: - with NamedTemporaryFile("w") as temp_file_2: - with TemporaryDirectory() as temp_dir: - upload_file_set_attribute( - swagger_client=swagger_mock, - container_id=exp_uuid, - attribute="some/attribute", - file_globs=[ - temp_file_1.name, - temp_file_2.name, - os.path.abspath("missing_file"), - temp_dir, - ], - reset=True, - multipart_config=self.multipart_config, - ) - - # then - upload_raw_data_mock.assert_called_once_with( - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/uploadFileSetTar", - data=mock.ANY, - headers={"Content-Type": "application/octet-stream"}, - query_params={ - "experimentId": str(exp_uuid), - "attribute": "some/attribute", - "reset": "True", - }, - ) - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - def test_upload_small_file_attribute(self, upload_raw_data): - # given - exp_uuid = str(uuid.uuid4()) - swagger_mock = self._get_swagger_mock() - upload_raw_data.return_value = json.dumps( - { - "uploadId": "placeholder", - "errors": [], - } - ) - data = b"testdata" - - # when - with create_file(content=data, binary_mode=True) as filename: - upload_file_attribute( - swagger_client=swagger_mock, - container_id=exp_uuid, - attribute="target/path.txt", - source=filename, - ext="txt", - multipart_config=self.multipart_config, - ) - - # then - 
swagger_mock.api.fileSetFileMultipartUploadStart.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadPart.assert_not_called() - swagger_mock.api.fileSetFileUpload.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadStart.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadPart.assert_not_called() - swagger_mock.api.fileAtomUpload.assert_not_called() - upload_raw_data.assert_called_once_with( - data=data, - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/file/upload", - query_params={ - "experimentIdentifier": str(exp_uuid), - "attribute": "target/path.txt", - "ext": "txt", - }, - ) - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - def test_upload_big_file_attribute(self, upload_raw_data): - # given - exp_uuid = str(uuid.uuid4()) - swagger_mock = self._get_swagger_mock() - upload_id = "placeholder" - set_expected_result( - swagger_mock.api.fileAtomMultipartUploadStart, - { - "uploadId": upload_id, - "errors": [], - }, - ) - upload_raw_data.return_value = json.dumps( - { - "errors": [], - } - ) - data = self.get_random_bytes(201 * 2**10) # 201 KB (200KB is multipart upload config) - chunk_size = self.multipart_config.min_chunk_size - - # when - with create_file(content=data, binary_mode=True) as filename: - upload_file_attribute( - swagger_client=swagger_mock, - container_id=exp_uuid, - attribute="target/path.txt", - source=filename, - ext="txt", - multipart_config=self.multipart_config, - ) - - # then - swagger_mock.api.fileSetFileMultipartUploadStart.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadPart.assert_not_called() - swagger_mock.api.fileSetFileUpload.assert_not_called() - swagger_mock.api.fileAtomUpload.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadStart.assert_called_once_with( - attribute="target/path.txt", - experimentIdentifier=str(exp_uuid), - ext="txt", - totalLength=len(data), - ) - swagger_mock.api.fileAtomMultipartUploadFinish.assert_called_once_with( - attribute="target/path.txt", - experimentIdentifier=str(exp_uuid), - uploadId=upload_id, - ) - upload_raw_data.assert_has_calls( - [ - call( - data=data[:chunk_size], - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/file/upload/part", - headers={"X-Range": f"bytes=0-{chunk_size - 1}/{len(data)}"}, - query_params={ - "uploadPartIdx": 0, - "uploadId": upload_id, - "experimentIdentifier": str(exp_uuid), - "attribute": "target/path.txt", - }, - ), - call( - data=data[chunk_size:], - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/file/upload/part", - headers={"X-Range": f"bytes={chunk_size}-{len(data) - 1}/{len(data)}"}, - query_params={ - "uploadPartIdx": 1, - "uploadId": upload_id, - "experimentIdentifier": str(exp_uuid), - "attribute": "target/path.txt", - }, - ), - ] - ) - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - @patch( - "neptune.internal.utils.glob", - new=lambda path, recursive=False: [path.replace("*", "file.txt")], - ) - def test_upload_single_small_file_in_file_set_attribute(self, upload_raw_data): - # 
given - exp_uuid = uuid.uuid4() - swagger_mock = self._get_swagger_mock() - upload_raw_data.return_value = json.dumps( - { - "errors": [], - } - ) - data = b"testdata" - - # when - with create_file(content=data, binary_mode=True) as filename: - upload_file_set_attribute( - swagger_client=swagger_mock, - container_id=str(exp_uuid), - attribute="some/attribute", - file_globs=[filename], - reset=True, - multipart_config=self.multipart_config, - ) - - # then - swagger_mock.api.fileSetFileMultipartUploadStart.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadPart.assert_not_called() - swagger_mock.api.fileSetFileUpload.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadStart.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadPart.assert_not_called() - swagger_mock.api.fileAtomUpload.assert_not_called() - upload_raw_data.assert_called_once_with( - data=data, - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/fileset/upload", - query_params={ - "subPath": os.path.basename(filename), - "experimentIdentifier": str(exp_uuid), - "attribute": "some/attribute", - }, - ) - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - @patch( - "neptune.internal.utils.glob", - new=lambda path, recursive=False: [path.replace("*", "file.txt")], - ) - def test_upload_single_big_file_in_file_set_attribute(self, upload_raw_data): - # given - exp_uuid = uuid.uuid4() - swagger_mock = self._get_swagger_mock() - upload_id = "placeholder" - set_expected_result( - swagger_mock.api.fileSetFileMultipartUploadStart, - { - "uploadId": upload_id, - "errors": [], - }, - ) - upload_raw_data.return_value = json.dumps( - { - "errors": [], - } - ) - data = self.get_random_bytes(201 * 2**10) # 201 KB (200KB is multipart upload config) - chunk_size = self.multipart_config.min_chunk_size - - # when - with create_file(content=data, binary_mode=True) as filename: - upload_file_set_attribute( - swagger_client=swagger_mock, - container_id=str(exp_uuid), - attribute="some/attribute", - file_globs=[filename], - reset=True, - multipart_config=self.multipart_config, - ) - - # then - swagger_mock.api.fileSetFileMultipartUploadPart.assert_not_called() - swagger_mock.api.fileSetFileUpload.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadStart.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadPart.assert_not_called() - swagger_mock.api.fileAtomUpload.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadStart.assert_called_once_with( - attribute="some/attribute", - experimentIdentifier=str(exp_uuid), - totalLength=len(data), - subPath=os.path.basename(filename), - ) - swagger_mock.api.fileSetFileMultipartUploadFinish.assert_called_once_with( - attribute="some/attribute", - experimentIdentifier=str(exp_uuid), - subPath=os.path.basename(filename), - uploadId=upload_id, - ) - upload_raw_data.assert_has_calls( - [ - call( - data=data[:chunk_size], - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/fileset/upload/part", - headers={"X-Range": f"bytes=0-{chunk_size - 1}/{len(data)}"}, - query_params={ - "uploadPartIdx": 0, - "uploadId": upload_id, - "subPath": os.path.basename(filename), - 
"experimentIdentifier": str(exp_uuid), - "attribute": "some/attribute", - }, - ), - call( - data=data[chunk_size:], - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/attributes/storage/fileset/upload/part", - headers={"X-Range": f"bytes={chunk_size}-{len(data) - 1}/{len(data)}"}, - query_params={ - "uploadPartIdx": 1, - "uploadId": upload_id, - "subPath": os.path.basename(filename), - "experimentIdentifier": str(exp_uuid), - "attribute": "some/attribute", - }, - ), - ] - ) - - @unittest.skipIf(IS_WINDOWS, "Windows behaves strangely") - @patch("neptune.internal.backends.hosted_file_operations.upload_raw_data") - @patch( - "neptune.internal.utils.glob", - new=lambda path, recursive=False: [path.replace("*", "file.txt")], - ) - def test_upload_multiple_files_in_file_set_attribute(self, upload_raw_data_mock): - # given - exp_uuid = str(uuid.uuid4()) - swagger_mock = self._get_swagger_mock() - upload_raw_data_mock.return_value = b"null" - swagger_mock.api.getUploadConfig.return_value.response.return_value.result.chunkSize = 10 - - # when - with NamedTemporaryFile("w") as temp_file_1: - with NamedTemporaryFile("w") as temp_file_2: - upload_file_set_attribute( - swagger_client=swagger_mock, - container_id=exp_uuid, - attribute="some/attribute", - file_globs=[temp_file_1.name, temp_file_2.name], - reset=True, - multipart_config=self.multipart_config, - ) - - # then - swagger_mock.api.fileSetFileMultipartUploadStart.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileSetFileMultipartUploadPart.assert_not_called() - swagger_mock.api.fileSetFileUpload.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadStart.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadFinish.assert_not_called() - swagger_mock.api.fileAtomMultipartUploadPart.assert_not_called() - swagger_mock.api.fileAtomUpload.assert_not_called() - upload_raw_data_mock.assert_called_once_with( - http_client=swagger_mock.swagger_spec.http_client, - url="https://ui.neptune.ai/uploadFileSetTar", - data=mock.ANY, - headers={"Content-Type": "application/octet-stream"}, - query_params={ - "experimentId": str(exp_uuid), - "attribute": "some/attribute", - "reset": "True", - }, - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/unit/neptune/new/internal/backends/test_hosted_neptune_backend.py b/tests/unit/neptune/new/internal/backends/test_hosted_neptune_backend.py deleted file mode 100644 index b1299d7e5..000000000 --- a/tests/unit/neptune/new/internal/backends/test_hosted_neptune_backend.py +++ /dev/null @@ -1,714 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import socket -import time -import unittest -import uuid -from pathlib import Path -from unittest.mock import ( - Mock, - call, -) - -import pytest -from bravado.exception import ( - HTTPNotFound, - HTTPPaymentRequired, - HTTPTooManyRequests, - HTTPUnprocessableEntity, -) -from mock import ( - MagicMock, - patch, -) -from packaging.version import Version - -from neptune.core.components.operation_storage import OperationStorage -from neptune.exceptions import ( - CannotResolveHostname, - FileSetNotFound, - FileUploadError, - MetadataInconsistency, - NeptuneClientUpgradeRequiredError, - NeptuneLimitExceedException, -) -from neptune.internal.backends.hosted_client import ( - DEFAULT_REQUEST_KWARGS, - _get_token_client, - create_artifacts_client, - create_backend_client, - create_http_client_with_auth, - create_leaderboard_client, - get_client_config, -) -from neptune.internal.backends.hosted_neptune_backend import HostedNeptuneBackend -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper -from neptune.internal.backends.utils import verify_host_resolution -from neptune.internal.container_type import ContainerType -from neptune.internal.credentials import Credentials -from neptune.internal.operation import ( - AssignString, - LogFloats, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, -) -from neptune.internal.utils import base64_encode -from tests.unit.neptune.backend_test_mixin import BackendTestMixin -from tests.unit.neptune.new.utils import response_mock - -API_TOKEN = ( - "eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vYXBwLnN0YWdlLm5lcHR1bmUuYWkiLCJ" - "hcGlfa2V5IjoiOTJhNzhiOWQtZTc3Ni00ODlhLWI5YzEtNzRkYmI1ZGVkMzAyIn0=" -) - -credentials = Credentials.from_token(API_TOKEN) - - -@patch("neptune.internal.backends.hosted_client.RequestsClient", new=MagicMock()) -@patch("neptune.internal.backends.hosted_client.NeptuneAuthenticator", new=MagicMock()) -@patch("bravado.client.SwaggerClient.from_url") -@patch("platform.platform", new=lambda: "testPlatform") -@patch("platform.python_version", new=lambda: "3.9.test") -class TestHostedNeptuneBackend(unittest.TestCase, BackendTestMixin): - def setUp(self) -> None: - # Clear all LRU storage - verify_host_resolution.cache_clear() - _get_token_client.cache_clear() - get_client_config.cache_clear() - create_http_client_with_auth.cache_clear() - create_backend_client.cache_clear() - create_leaderboard_client.cache_clear() - create_artifacts_client.cache_clear() - - self.container_types = [ContainerType.RUN, ContainerType.PROJECT] - self.dummy_operation_storage = OperationStorage(Path("./tests/dummy_storage")) - - @patch("neptune.internal.backends.hosted_neptune_backend.upload_file_attribute") - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_execute_operations(self, upload_mock, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_uuid = str(uuid.uuid4()) - - response_error = MagicMock() - response_error.errorDescription = "error1" - swagger_client.api.executeOperations().response().result = [response_error] - swagger_client.api.executeOperations.reset_mock() - upload_mock.return_value = [FileUploadError("file1", "error2")] - some_text = "Some streamed text" - some_binary = b"Some streamed binary" - - for container_type in self.container_types: - with self.subTest(msg=f"For type {container_type.value}"): - upload_mock.reset_mock() - swagger_client_factory.reset_mock() - - # when - result = 
backend.execute_operations(
-                    container_id=container_uuid,
-                    container_type=container_type,
-                    operations=[
-                        UploadFile(
-                            path=["some", "files", "some_file"],
-                            ext="",
-                            file_path="path_to_file",
-                        ),
-                        UploadFileContent(
-                            path=["some", "files", "some_text_stream"],
-                            ext="txt",
-                            file_content=base64_encode(some_text.encode("utf-8")),
-                        ),
-                        UploadFileContent(
-                            path=["some", "files", "some_binary_stream"],
-                            ext="bin",
-                            file_content=base64_encode(some_binary),
-                        ),
-                        LogFloats(["images", "img1"], [LogFloats.ValueType(1, 2, 3)]),
-                        AssignString(["properties", "name"], "some text"),
-                        UploadFile(
-                            path=["some", "other", "file.txt"],
-                            ext="txt",
-                            file_path="other/file/path.txt",
-                        ),
-                    ],
-                    operation_storage=self.dummy_operation_storage,
-                )
-
-                # then
-                swagger_client.api.executeOperations.assert_called_once_with(
-                    **{
-                        "experimentId": str(container_uuid),
-                        "operations": [
-                            {
-                                "path": "images/img1",
-                                "logFloats": {
-                                    "entries": [
-                                        {
-                                            "value": 1,
-                                            "step": 2,
-                                            "timestampMilliseconds": 3000,
-                                        }
-                                    ]
-                                },
-                            },
-                            {
-                                "path": "properties/name",
-                                "assignString": {"value": "some text"},
-                            },
-                        ],
-                        **DEFAULT_REQUEST_KWARGS,
-                    }
-                )
-
-                upload_mock.assert_has_calls(
-                    [
-                        call(
-                            swagger_client=backend.leaderboard_client,
-                            container_id=container_uuid,
-                            attribute="some/other/file.txt",
-                            source="other/file/path.txt",
-                            ext="txt",
-                            multipart_config=backend._client_config.multipart_config,
-                        ),
-                        call(
-                            swagger_client=backend.leaderboard_client,
-                            container_id=container_uuid,
-                            attribute="some/files/some_file",
-                            source="path_to_file",
-                            ext="",
-                            multipart_config=backend._client_config.multipart_config,
-                        ),
-                        call(
-                            swagger_client=backend.leaderboard_client,
-                            container_id=container_uuid,
-                            attribute="some/files/some_text_stream",
-                            source=some_text.encode("utf-8"),
-                            ext="txt",
-                            multipart_config=backend._client_config.multipart_config,
-                        ),
-                        call(
-                            swagger_client=backend.leaderboard_client,
-                            container_id=container_uuid,
-                            attribute="some/files/some_binary_stream",
-                            source=some_binary,
-                            ext="bin",
-                            multipart_config=backend._client_config.multipart_config,
-                        ),
-                    ],
-                    any_order=True,
-                )
-
-                self.assertEqual(
-                    (
-                        6,
-                        [
-                            FileUploadError("file1", "error2"),
-                            FileUploadError("file1", "error2"),
-                            FileUploadError("file1", "error2"),
-                            FileUploadError("file1", "error2"),
-                            MetadataInconsistency("error1"),
-                        ],
-                    ),
-                    result,
-                )
-
-    @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1"))
-    def test_too_many_requests(self, swagger_client_factory):
-        # given
-        swagger_client = self._get_swagger_client_mock(swagger_client_factory)
-        backend = HostedNeptuneBackend(credentials)
-        container_uuid = str(uuid.uuid4())
-
-        container_type = ContainerType.RUN
-
-        response = MagicMock()
-        response.response().return_value = []
-
-        retry_after_seconds = 5  # example wait time advertised by the server
-        too_many_requests_response = HTTPTooManyRequests(MagicMock())
-        too_many_requests_response.headers = {"retry-after": str(retry_after_seconds)}
-
-        swagger_client.api.executeOperations.side_effect = Mock(
-            side_effect=[too_many_requests_response, response_mock()]
-        )
-
-        # when
-        result_start_time = time.time()
-        result = backend.execute_operations(
-            container_id=container_uuid,
-            container_type=container_type,
-            operations=[
-                LogFloats(["images", "img1"], [LogFloats.ValueType(1, 2, 3)]),
-            ],
-            operation_storage=self.dummy_operation_storage,
-        )
-        result_end_time = time.time()
-
-        # then
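-        # --- Editor's sketch, not part of the original patch. The timing bounds
-        # asserted below assume the client honours the Retry-After header with a
-        # single sleep before retrying, roughly:
-        #
-        #     try:
-        #         return send()
-        #     except HTTPTooManyRequests as e:
-        #         time.sleep(int(e.headers["retry-after"]))
-        #         return send()
-        #
-        # hence an elapsed time of at least one Retry-After but comfortably under two.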
self.assertEqual(result, (1, [])) - assert retry_after_seconds <= (result_end_time - result_start_time) <= (retry_after_seconds * 2) - - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_execute_operations_retry_request(self, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_uuid = str(uuid.uuid4()) - - container_type = ContainerType.RUN - - response = MagicMock() - response.response().return_value = [] - swagger_client.api.executeOperations.side_effect = Mock( - side_effect=[HTTPTooManyRequests(MagicMock()), response_mock()] - ) - - # when - result = backend.execute_operations( - container_id=container_uuid, - container_type=container_type, - operations=[ - LogFloats(["images", "img1"], [LogFloats.ValueType(1, 2, 3)]), - ], - operation_storage=self.dummy_operation_storage, - ) - - # then - self.assertEqual(result, (1, [])) - execution_operation_call = call( - **{ - "experimentId": str(container_uuid), - "operations": [ - { - "path": "images/img1", - "logFloats": { - "entries": [ - { - "value": 1, - "step": 2, - "timestampMilliseconds": 3000, - } - ] - }, - } - ], - **DEFAULT_REQUEST_KWARGS, - } - ) - swagger_client.api.executeOperations.assert_has_calls([execution_operation_call, execution_operation_call]) - - @patch("neptune.internal.backends.hosted_neptune_backend.upload_file_attribute") - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_upload_files_destination_path(self, upload_mock, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_uuid = str(uuid.uuid4()) - - for container_type in self.container_types: - with self.subTest(msg=f"For type {container_type.value}"): - upload_mock.reset_mock() - swagger_client_factory.reset_mock() - - # when - backend.execute_operations( - container_id=container_uuid, - container_type=container_type, - operations=[ - UploadFile( - path=["some", "path", "1", "var"], - ext="", - file_path="/path/to/file", - ), - UploadFile( - path=["some", "path", "2", "var"], - ext="txt", - file_path="/some.file/with.dots.txt", - ), - UploadFile( - path=["some", "path", "3", "var"], - ext="jpeg", - file_path="/path/to/some_image.jpeg", - ), - ], - operation_storage=self.dummy_operation_storage, - ) - - # then - upload_mock.assert_has_calls( - [ - call( - swagger_client=backend.leaderboard_client, - container_id=container_uuid, - attribute="some/path/1/var", - source="/path/to/file", - ext="", - multipart_config=backend._client_config.multipart_config, - ), - call( - swagger_client=backend.leaderboard_client, - container_id=container_uuid, - attribute="some/path/2/var", - source="/some.file/with.dots.txt", - ext="txt", - multipart_config=backend._client_config.multipart_config, - ), - call( - swagger_client=backend.leaderboard_client, - container_id=container_uuid, - attribute="some/path/3/var", - source="/path/to/some_image.jpeg", - ext="jpeg", - multipart_config=backend._client_config.multipart_config, - ), - ], - any_order=True, - ) - - @patch("neptune.internal.backends.hosted_neptune_backend.track_to_new_artifact") - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_track_to_new_artifact(self, track_to_new_artifact_mock, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - 
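-        # --- Editor's sketch, not part of the original patch. Together with the
-        # "existing artifact" test that follows, the setup below suggests the
-        # backend probes getArtifactAttribute first and routes on the outcome;
-        # hypothetically:
-        #
-        #     try:
-        #         artifact_hash = api.getArtifactAttribute(...).response().result.hash
-        #     except HTTPNotFound:
-        #         track_to_new_artifact(...)
-        #     else:
-        #         track_to_existing_artifact(..., artifact_hash=artifact_hash)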
container_id = str(uuid.uuid4()) - project_id = str(uuid.uuid4()) - - response_error = MagicMock() - response_error.errorDescription = "error1" - swagger_client.api.executeOperations.return_value.response.return_value.result = [response_error] - swagger_client.api.getArtifactAttribute.side_effect = HTTPNotFound(response=response_mock()) - swagger_client_wrapper = SwaggerClientWrapper(swagger_client) - - for container_type in self.container_types: - with self.subTest(msg=f"For type {container_type.value}"): - track_to_new_artifact_mock.reset_mock() - swagger_client_factory.reset_mock() - - # when - backend.execute_operations( - container_id=container_id, - container_type=container_type, - operations=[ - TrackFilesToArtifact( - path=["sub", "one"], - project_id=project_id, - entries=[("/path/to/file", "/path/to")], - ), - TrackFilesToArtifact( - path=["sub", "two"], - project_id=project_id, - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - ), - TrackFilesToArtifact( - path=["sub", "three"], - project_id=project_id, - entries=[("/path/to/file1", None)], - ), - TrackFilesToArtifact( - path=["sub", "three"], - project_id=project_id, - entries=[("/path/to/file2", None)], - ), - ], - operation_storage=self.dummy_operation_storage, - ) - - # then - track_to_new_artifact_mock.assert_has_calls( - [ - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "one"], - parent_identifier=str(container_id), - entries=[("/path/to/file", "/path/to")], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - exclude_metadata_from_hash=True, - ), - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "two"], - parent_identifier=str(container_id), - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - exclude_metadata_from_hash=True, - ), - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "three"], - parent_identifier=str(container_id), - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - exclude_metadata_from_hash=True, - ), - ], - any_order=True, - ) - - @patch("neptune.internal.backends.hosted_neptune_backend.track_to_existing_artifact") - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_track_to_existing_artifact(self, track_to_existing_artifact_mock, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_id = str(uuid.uuid4()) - project_id = str(uuid.uuid4()) - - response_error = MagicMock() - response_error.errorDescription = "error1" - swagger_client.api.executeOperations.return_value.response.return_value.result = [response_error] - swagger_client.api.getArtifactAttribute.return_value.response.return_value.result.hash = "dummyHash" - swagger_client_wrapper = SwaggerClientWrapper(swagger_client) - - for container_type in self.container_types: - track_to_existing_artifact_mock.reset_mock() - swagger_client_factory.reset_mock() - - with self.subTest(msg=f"For type {container_type.value}"): - track_to_existing_artifact_mock.reset_mock() - swagger_client_factory.reset_mock() - - # when - backend.execute_operations( - container_id=container_id, - container_type=container_type, - operations=[ - TrackFilesToArtifact( - 
path=["sub", "one"], - project_id=project_id, - entries=[("/path/to/file", "/path/to")], - ), - TrackFilesToArtifact( - path=["sub", "two"], - project_id=project_id, - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - ), - TrackFilesToArtifact( - path=["sub", "three"], - project_id=project_id, - entries=[("/path/to/file1", None)], - ), - TrackFilesToArtifact( - path=["sub", "three"], - project_id=project_id, - entries=[("/path/to/file2", None)], - ), - ], - operation_storage=self.dummy_operation_storage, - ) - - # then - track_to_existing_artifact_mock.assert_has_calls( - [ - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "one"], - artifact_hash="dummyHash", - parent_identifier=str(container_id), - entries=[("/path/to/file", "/path/to")], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - ), - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "two"], - artifact_hash="dummyHash", - parent_identifier=str(container_id), - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - ), - call( - swagger_client=swagger_client_wrapper, - project_id=project_id, - path=["sub", "three"], - artifact_hash="dummyHash", - parent_identifier=str(container_id), - entries=[ - ("/path/to/file1", None), - ("/path/to/file2", None), - ], - default_request_params=DEFAULT_REQUEST_KWARGS, - exclude_directory_files=True, - ), - ], - any_order=True, - ) - - @patch( - "neptune.internal.backends.hosted_client.neptune_version", - Version("0.5.13"), - ) - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_min_compatible_version_ok(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory, min_compatible="0.5.13") - - # expect - HostedNeptuneBackend(credentials) - - @patch( - "neptune.internal.backends.hosted_client.neptune_version", - Version("2.0.0-alpha4+dev1234"), - ) - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_min_compatible_pre_release_version_ok(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory, min_compatible="2.0.0") - - # expect - HostedNeptuneBackend(credentials) - - @patch( - "neptune.internal.backends.hosted_client.neptune_version", - Version("0.5.13"), - ) - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_min_compatible_version_fail(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory, min_compatible="0.5.14") - - # expect - with self.assertRaises(NeptuneClientUpgradeRequiredError) as ex: - HostedNeptuneBackend(credentials) - - self.assertTrue("minimum required version is >=0.5.14" in str(ex.exception)) - - @patch( - "neptune.internal.backends.hosted_client.neptune_version", - Version("0.5.13"), - ) - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_max_compatible_version_ok(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory, max_compatible="0.5.12") - - # expect - HostedNeptuneBackend(credentials) - - @patch( - "neptune.internal.backends.hosted_client.neptune_version", - Version("0.5.13"), - ) - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_max_compatible_version_fail(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory, 
max_compatible="0.4.999") - - # expect - with self.assertRaises(NeptuneClientUpgradeRequiredError) as ex: - HostedNeptuneBackend(credentials) - - self.assertTrue("minimum required version is ==0.4.0" in str(ex.exception)) - - @patch("socket.gethostbyname") - def test_cannot_resolve_host(self, gethostname_mock, _): - # given - gethostname_mock.side_effect = socket.gaierror - - # expect - with self.assertRaises(CannotResolveHostname): - HostedNeptuneBackend(credentials) - - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_limit_exceed(self, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_uuid = str(uuid.uuid4()) - - # when: - error = response_mock() - error.json.return_value = {"title": "Maximum storage limit reached"} - swagger_client.api.executeOperations.side_effect = HTTPPaymentRequired(response=error) - - # then: - for container_type in self.container_types: - with self.subTest(msg=f"For type {container_type.value}"): - with self.assertRaises(NeptuneLimitExceedException): - backend.execute_operations( - container_id=container_uuid, - container_type=container_type, - operations=[ - LogFloats(["float1"], [LogFloats.ValueType(1, 2, 3)]), - ], - operation_storage=self.dummy_operation_storage, - ) - - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_limit_exceed_legacy(self, swagger_client_factory): - # given - swagger_client = self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - container_uuid = str(uuid.uuid4()) - - # when: - error = response_mock() - error.json.return_value = {"title": "Monitoring hours not left"} - swagger_client.api.executeOperations.side_effect = HTTPUnprocessableEntity(response=error) - - # then: - for container_type in self.container_types: - with self.subTest(msg=f"For type {container_type.value}"): - with self.assertRaises(NeptuneLimitExceedException): - backend.execute_operations( - container_id=container_uuid, - container_type=container_type, - operations=[ - LogFloats(["float1"], [LogFloats.ValueType(1, 2, 3)]), - ], - operation_storage=self.dummy_operation_storage, - ) - - @patch("socket.gethostbyname", MagicMock(return_value="1.1.1.1")) - def test_list_fileset_files_exception(self, swagger_client_factory): - # given - self._get_swagger_client_mock(swagger_client_factory) - backend = HostedNeptuneBackend(credentials) - mock_leaderboard_client = MagicMock() - mock_leaderboard_client.api.lsFileSetAttribute.side_effect = HTTPNotFound(response_mock()) - - backend.leaderboard_client = mock_leaderboard_client - - # then - with pytest.raises(FileSetNotFound): - backend.list_fileset_files(["mock"], "mock", ".") diff --git a/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py b/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py deleted file mode 100644 index bd516c356..000000000 --- a/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py +++ /dev/null @@ -1,448 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import datetime -import random -import string -import unittest -import uuid -from pathlib import Path -from time import time - -from freezegun import freeze_time - -from neptune.api.models import ( - DateTimeField, - FloatField, - FloatPointValue, - FloatSeriesField, - FloatSeriesValues, - StringField, - StringPointValue, - StringSeriesField, - StringSeriesValues, - StringSetField, -) -from neptune.core.components.operation_storage import OperationStorage -from neptune.exceptions import ( - ContainerUUIDNotFound, - MetadataInconsistency, -) -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.internal.container_type import ContainerType -from neptune.internal.operation import ( - AddStrings, - AssignDatetime, - AssignFloat, - AssignString, - LogFloats, - LogStrings, -) - - -def a_string() -> str: - char_set = string.ascii_letters - return "".join(random.sample(char_set * 10, 10)) - - -class TestNeptuneBackendMock(unittest.TestCase): - def setUp(self) -> None: - self.backend = NeptuneBackendMock() - project_id = self.backend._project_id - exp = self.backend.create_run(project_id=project_id) - model = self.backend.create_model( - project_id=project_id, - key="MOD", - ) - model_version = self.backend.create_model_version(project_id=project_id, model_id=model.id) - self.ids_with_types = [ - (self.backend._project_id, ContainerType.PROJECT), - (exp.id, ContainerType.RUN), - (model.id, ContainerType.MODEL), - (model_version.id, ContainerType.MODEL_VERSION), - ] - self.dummy_operation_storage = OperationStorage(Path("./tests/dummy_storage")) - - def test_get_float_attribute(self): - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - # given - digit = random.randint(1, 10**4) - path = ["x"] - self.backend.execute_operations( - container_id, - container_type, - operations=[AssignFloat(path, digit)], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_float_attribute(container_id, container_type, path=path) - - # then - self.assertEqual(FloatField(path="x", value=digit), ret) - - def test_get_string_attribute(self): - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - # given - text = a_string() - path = ["x"] - self.backend.execute_operations( - container_id, - container_type, - operations=[AssignString(path, text)], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_string_attribute(container_id, container_type, path=path) - - # then - self.assertEqual(StringField(path="x", value=text), ret) - - def test_get_datetime_attribute(self): - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - # given - now = datetime.datetime.now() - now = now.replace(microsecond=1000 * int(now.microsecond / 1000)) - path = ["x"] - - # and - self.backend.execute_operations( - container_id, - container_type, - [AssignDatetime(path, now)], - operation_storage=self.dummy_operation_storage, - ) 
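-                # --- Editor's note, not part of the original patch. The microsecond
-                # rounding in the "given" step above mirrors millisecond-precision
-                # storage, so the fetched value can compare equal, e.g.:
-                #
-                #     ts = datetime.datetime(2024, 1, 1, 12, 0, 0, 123456)
-                #     ts.replace(microsecond=1000 * int(ts.microsecond / 1000))  # -> ..., 123000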
- - # when - ret = self.backend.get_datetime_attribute(container_id, container_type, path) - - # then - self.assertEqual(DateTimeField(path="x", value=now), ret) - - def test_get_float_series_attribute(self): - # given - path = ["x"] - - # and - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [ - LogFloats( - path, - [ - LogFloats.ValueType(5, None, time()), - LogFloats.ValueType(3, None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - self.backend.execute_operations( - container_id, - container_type, - [ - LogFloats( - path, - [ - LogFloats.ValueType(2, None, time()), - LogFloats.ValueType(9, None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_float_series_attribute(container_id, container_type, path) - - # then - self.assertEqual(FloatSeriesField(last=9, path="x"), ret) - - def test_get_string_series_attribute(self): - # given - path = ["x"] - - # and - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [ - LogStrings( - path, - [ - LogStrings.ValueType("adf", None, time()), - LogStrings.ValueType("sdg", None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - self.backend.execute_operations( - container_id, - container_type, - [ - LogStrings( - path, - [ - LogStrings.ValueType("dfh", None, time()), - LogStrings.ValueType("qwe", None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_string_series_attribute(container_id, container_type, path) - - # then - self.assertEqual(StringSeriesField(last="qwe", path="x"), ret) - - def test_get_string_set_attribute(self): - # given - path = ["x"] - - # and - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AddStrings(path, {"abcx", "qwe"})], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_string_set_attribute(container_id, container_type, path) - - # then - self.assertEqual(StringSetField(values={"abcx", "qwe"}, path="x"), ret) - - @freeze_time("2024-01-01T12:34:56.123456Z") - def test_get_string_series_values(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [ - LogStrings( - ["x"], - [ - LogStrings.ValueType("adf", None, time()), - LogStrings.ValueType("sdg", None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - self.backend.execute_operations( - container_id, - container_type, - [ - LogStrings( - ["x"], - [ - LogStrings.ValueType("dfh", None, time()), - LogStrings.ValueType("qwe", None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_string_series_values(container_id, container_type, path=["x"], limit=100) - - # then - self.assertEqual( - StringSeriesValues( - 4, - [ - StringPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=0, value="adf" - ), - StringPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), 
step=1, value="sdg" - ), - StringPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=2, value="dfh" - ), - StringPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=3, value="qwe" - ), - ], - ), - ret, - ) - - @freeze_time("2024-01-01T12:34:56.123456Z") - def test_get_float_series_values(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [ - LogFloats( - ["x"], - [ - LogFloats.ValueType(5, None, time()), - LogFloats.ValueType(3, None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - self.backend.execute_operations( - container_id, - container_type, - [ - LogFloats( - ["x"], - [ - LogFloats.ValueType(2, None, time()), - LogFloats.ValueType(9, None, time()), - ], - ) - ], - operation_storage=self.dummy_operation_storage, - ) - - # when - ret = self.backend.get_float_series_values(container_id, container_type, path=["x"], limit=100) - - # then - self.assertEqual( - FloatSeriesValues( - 4, - [ - FloatPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=0, value=5 - ), - FloatPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=1, value=3 - ), - FloatPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=2, value=2 - ), - FloatPointValue( - timestamp=datetime.datetime(2024, 1, 1, 12, 34, 56, 123456), step=3, value=9 - ), - ], - ), - ret, - ) - - def test_get_float_attribute_wrong_type(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignString(["x"], "abc")], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(MetadataInconsistency): - self.backend.get_float_series_attribute(container_id, container_type, ["x"]) - - def test_get_string_attribute_wrong_type(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignFloat(["x"], 5)], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(MetadataInconsistency): - self.backend.get_string_attribute(container_id, container_type, ["x"]) - - def test_get_datetime_attribute_wrong_type(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignString(["x"], "abc")], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(MetadataInconsistency): - self.backend.get_datetime_attribute(container_id, container_type, ["x"]) - - def test_get_string_series_attribute_wrong_type(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignString(["x"], "abc")], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(MetadataInconsistency): - self.backend.get_string_series_attribute(container_id, container_type, ["x"]) - - def test_get_string_set_attribute_wrong_type(self): - # given - for 
container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignString(["x"], "abc")], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(MetadataInconsistency): - self.backend.get_string_set_attribute(container_id, container_type, ["x"]) - - def test_container_not_found(self): - # given - for container_id, container_type in self.ids_with_types: - with self.subTest(f"For containerType: {container_type}"): - self.backend.execute_operations( - container_id, - container_type, - [AssignString(["x"], "abc")], - operation_storage=self.dummy_operation_storage, - ) - - # then - with self.assertRaises(ContainerUUIDNotFound): - self.backend.get_float_series_attribute(str(uuid.uuid4()), container_type, ["x"]) diff --git a/tests/unit/neptune/new/internal/backends/test_nql.py b/tests/unit/neptune/new/internal/backends/test_nql.py deleted file mode 100644 index f7a2a349d..000000000 --- a/tests/unit/neptune/new/internal/backends/test_nql.py +++ /dev/null @@ -1,163 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import unittest - -from neptune.internal.backends.nql import ( - NQLAggregator, - NQLAttributeOperator, - NQLAttributeType, - NQLEmptyQuery, - NQLQueryAggregate, - NQLQueryAttribute, - RawNQLQuery, -) - - -class TestNQL(unittest.TestCase): - def test_attributes(self): - self.assertEqual( - str( - NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value="user1", - ) - ), - '(`sys/owner`:string = "user1")', - ) - self.assertEqual( - str( - NQLQueryAttribute( - name="sys/tags", - type=NQLAttributeType.STRING_SET, - operator=NQLAttributeOperator.CONTAINS, - value="tag1", - ) - ), - '(`sys/tags`:stringSet CONTAINS "tag1")', - ) - self.assertEqual( - str( - NQLQueryAttribute( - name="sys/state", - type=NQLAttributeType.EXPERIMENT_STATE, - operator=NQLAttributeOperator.EQUALS, - value="running", - ) - ), - '(`sys/state`:experimentState = "running")', - ) - self.assertEqual( - str( - NQLQueryAttribute( - name="sys/trashed", - type=NQLAttributeType.BOOLEAN, - operator=NQLAttributeOperator.EQUALS, - value=False, - ) - ), - "(`sys/trashed`:bool = false)", - ) - - def test_multiple_attribute_values(self): - self.assertEqual( - str( - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value=user, - ) - for user in ["user1", "user2"] - ], - aggregator=NQLAggregator.OR, - ) - ), - '((`sys/owner`:string = "user1") OR (`sys/owner`:string = "user2"))', - ) - - def test_multiple_queries(self): - self.assertEqual( - str( - NQLQueryAggregate( - items=[ - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value=user, - ) - for user in ["user1", "user2"] - 
], - aggregator=NQLAggregator.OR, - ), - NQLQueryAggregate( - items=[ - NQLQueryAttribute( - name="sys/tags", - type=NQLAttributeType.STRING_SET, - operator=NQLAttributeOperator.CONTAINS, - value=tag, - ) - for tag in ["tag1", "tag2"] - ], - aggregator=NQLAggregator.OR, - ), - ], - aggregator=NQLAggregator.AND, - ) - ), - '(((`sys/owner`:string = "user1") OR (`sys/owner`:string = "user2")) AND ' - '((`sys/tags`:stringSet CONTAINS "tag1") OR (`sys/tags`:stringSet CONTAINS "tag2")))', - ) - - def test_raw_query(self): - self.assertEqual(str(RawNQLQuery("")), str(NQLEmptyQuery())) - self.assertEqual(RawNQLQuery("").eval(), NQLEmptyQuery().eval()) - self.assertEqual( - str(RawNQLQuery('(`sys/owner`:string = "user1")')), - str( - NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value="user1", - ) - ), - ) - - def test_nql_evaluation(self): - self.assertEqual( - NQLQueryAggregate(items=[NQLEmptyQuery(), RawNQLQuery("")], aggregator=NQLAggregator.AND).eval(), - NQLEmptyQuery(), - ) - - query_of_interest = NQLQueryAttribute( - name="sys/owner", - type=NQLAttributeType.STRING, - operator=NQLAttributeOperator.EQUALS, - value="user1", - ) - - self.assertEqual( - NQLQueryAggregate(items=[query_of_interest, NQLEmptyQuery()], aggregator=NQLAggregator.AND).eval(), - query_of_interest.eval(), - ) diff --git a/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py b/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py deleted file mode 100644 index 042da27ae..000000000 --- a/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py +++ /dev/null @@ -1,416 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import uuid - -from neptune.exceptions import MetadataInconsistency -from neptune.internal.backends.operations_preprocessor import OperationsPreprocessor -from neptune.internal.operation import ( - AddStrings, - AssignFloat, - AssignString, - ClearFloatLog, - ClearImageLog, - ClearStringSet, - ConfigFloatSeries, - DeleteAttribute, - LogFloats, - LogImages, - LogStrings, - RemoveStrings, - TrackFilesToArtifact, - UploadFileSet, -) -from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase - -FLog = LogFloats.ValueType -SLog = LogStrings.ValueType -ILog = LogImages.ValueType - - -class TestOperationsPreprocessor(TestAttributeBase): - def test_delete_attribute(self): - # given - processor = OperationsPreprocessor() - - operations = [ - DeleteAttribute(["a"]), - AssignFloat(["a"], 1), - DeleteAttribute(["a"]), - AssignString(["b"], "2"), - DeleteAttribute(["b"]), - AssignString(["c"], "2"), - AssignString(["c"], "3"), - DeleteAttribute(["c"]), - DeleteAttribute(["d"]), - DeleteAttribute(["d"]), - DeleteAttribute(["e"]), - ] - - # when - processor.process(operations) - - # then - result = processor.get_operations() - self.assertEqual(result.upload_operations, []) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - DeleteAttribute(["a"]), - AssignString(["b"], "2"), - DeleteAttribute(["b"]), - AssignString(["c"], "3"), - DeleteAttribute(["c"]), - DeleteAttribute(["d"]), - DeleteAttribute(["e"]), - ], - ) - self.assertEqual(result.errors, []) - self.assertEqual(processor.processed_ops_count, len(operations)) - - def test_assign(self): - # given - processor = OperationsPreprocessor() - - operations = [ - AssignFloat(["a"], 1), - DeleteAttribute(["a"]), - AssignString(["a"], "111"), - DeleteAttribute(["b"]), - AssignFloat(["b"], 2), - AssignFloat(["c"], 3), - AssignString(["c"], "333"), - AssignString(["d"], "44"), - AssignFloat(["e"], 5), - AssignFloat(["e"], 10), - AssignFloat(["e"], 33), - ] - - # when - processor.process(operations) - - # then - result = processor.get_operations() - self.assertEqual(result.upload_operations, []) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - AssignFloat(["a"], 1), - DeleteAttribute(["a"]), - AssignString(["a"], "111"), - DeleteAttribute(["b"]), - AssignFloat(["b"], 2), - AssignFloat(["c"], 3), - AssignString(["d"], "44"), - AssignFloat(["e"], 33), - ], - ) - self.assertEqual( - result.errors, - [MetadataInconsistency("Cannot perform AssignString operation on c: Attribute is not a String")], - ) - self.assertEqual(processor.processed_ops_count, len(operations)) - - def test_series(self): - # given - processor = OperationsPreprocessor() - - operations = [ - LogFloats(["a"], [FLog(1, 2, 3)]), - ConfigFloatSeries(["a"], min=7, max=70, unit="%"), - DeleteAttribute(["a"]), - LogStrings(["a"], [SLog("111", 3, 4)]), - DeleteAttribute(["b"]), - LogStrings(["b"], [SLog("222", None, 6)]), - LogFloats(["c"], [FLog(1, 2, 3)]), - LogFloats(["c"], [FLog(10, 20, 30), FLog(100, 200, 300)]), - LogStrings(["d"], [SLog("4", 111, 222)]), - ClearFloatLog(["e"]), - LogImages(["f"], [ILog("1", 2, 3)]), - LogImages(["f"], [ILog("10", 20, 30), FLog("100", 200, 300)]), - LogImages(["f"], [ILog("1", 2, 3)]), - LogImages(["f"], [ILog("10", 20, 30), FLog("100", 200, 300)]), - ClearImageLog(["f"]), - LogImages(["f"], [ILog("3", 20, 30), FLog("4", 200, 300)]), - LogImages(["f"], [ILog("5", 2, 3)]), - LogImages(["f"], [ILog("8", 20, 30), FLog("1000", 200, 
300)]), - LogImages(["g"], [ILog("10", 20, 30), FLog("100", 200, 300)]), - ClearImageLog(["g"]), - AssignString(["h"], "44"), - LogFloats(["h"], [FLog(10, 20, 30), FLog(100, 200, 300)]), - LogFloats(["i"], [FLog(1, 2, 3)]), - ConfigFloatSeries(["i"], min=7, max=70, unit="%"), - ClearFloatLog(["i"]), - LogFloats(["i"], [FLog(10, 20, 30), FLog(100, 200, 300)]), - ] - - # when - processor.process(operations) - - # then - result = processor.get_operations() - self.assertEqual(result.upload_operations, []) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - LogFloats(["a"], [FLog(1, 2, 3)]), - DeleteAttribute(["a"]), - LogStrings(["a"], [FLog("111", 3, 4)]), - DeleteAttribute(["b"]), - LogStrings(["b"], [SLog("222", None, 6)]), - LogFloats(["c"], [FLog(1, 2, 3), FLog(10, 20, 30), FLog(100, 200, 300)]), - LogStrings(["d"], [SLog("4", 111, 222)]), - ClearFloatLog(["e"]), - ClearImageLog(["f"]), - LogImages( - ["f"], - [ - ILog("3", 20, 30), - FLog("4", 200, 300), - ILog("5", 2, 3), - ILog("8", 20, 30), - FLog("1000", 200, 300), - ], - ), - ClearImageLog(["g"]), - AssignString(["h"], "44"), - ClearFloatLog(["i"]), - LogFloats(["i"], [FLog(10, 20, 30), FLog(100, 200, 300)]), - ConfigFloatSeries(["i"], min=7, max=70, unit="%"), - ], - ) - self.assertEqual( - result.errors, - [MetadataInconsistency("Cannot perform LogFloats operation on h: Attribute is not a Float Series")], - ) - self.assertEqual(processor.processed_ops_count, len(operations)) - - def test_sets(self): - # given - processor = OperationsPreprocessor() - - operations = [ - AddStrings(["a"], {"xx", "y", "abc"}), - DeleteAttribute(["a"]), - AddStrings(["a"], {"hhh", "gij"}), - DeleteAttribute(["b"]), - RemoveStrings(["b"], {"abc", "defgh"}), - AddStrings(["c"], {"hhh", "gij"}), - RemoveStrings(["c"], {"abc", "defgh"}), - AddStrings(["c"], {"qqq"}), - ClearStringSet(["d"]), - RemoveStrings(["e"], {"abc", "defgh"}), - AddStrings(["e"], {"qqq"}), - ClearStringSet(["e"]), - AddStrings(["f"], {"hhh", "gij"}), - RemoveStrings(["f"], {"abc", "defgh"}), - AddStrings(["f"], {"qqq"}), - ClearStringSet(["f"]), - AddStrings(["f"], {"xx", "y", "abc"}), - RemoveStrings(["f"], {"abc", "defgh"}), - AssignString(["h"], "44"), - RemoveStrings(["h"], {""}), - AssignFloat(["i"], 5), - AddStrings(["i"], {""}), - ] - - # when - processor.process(operations) - - # then - result = processor.get_operations() - self.assertEqual(result.upload_operations, []) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - AddStrings(["a"], {"xx", "y", "abc"}), - DeleteAttribute(["a"]), - AddStrings(["a"], {"hhh", "gij"}), - DeleteAttribute(["b"]), - RemoveStrings(["b"], {"abc", "defgh"}), - AddStrings(["c"], {"hhh", "gij"}), - RemoveStrings(["c"], {"abc", "defgh"}), - AddStrings(["c"], {"qqq"}), - ClearStringSet(["d"]), - ClearStringSet(["e"]), - ClearStringSet(["f"]), - AddStrings(["f"], {"xx", "y", "abc"}), - RemoveStrings(["f"], {"abc", "defgh"}), - AssignString(["h"], "44"), - AssignFloat(["i"], 5), - ], - ) - self.assertEqual( - result.errors, - [ - MetadataInconsistency("Cannot perform RemoveStrings operation on h: Attribute is not a String Set"), - MetadataInconsistency("Cannot perform AddStrings operation on i: Attribute is not a String Set"), - ], - ) - self.assertEqual(processor.processed_ops_count, len(operations)) - - def test_file_set(self): - # given - processor = OperationsPreprocessor() - - operations = [ - UploadFileSet(["a"], ["xx", "y", "abc"], 
reset=False), - UploadFileSet(["a"], ["hhh", "gij"], reset=False), - UploadFileSet(["b"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["hhh", "gij"], reset=False), - UploadFileSet(["c"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["qqq"], reset=False), - UploadFileSet(["d"], ["hhh", "gij"], reset=False), - AssignFloat(["e"], 5), - UploadFileSet(["e"], [""], reset=False), - ] - - # when - processor.process(operations) - - # then - result = processor.get_operations() - self.assertEqual( - result.upload_operations, - [ - UploadFileSet(["a"], ["xx", "y", "abc"], reset=False), - UploadFileSet(["a"], ["hhh", "gij"], reset=False), - UploadFileSet(["b"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["qqq"], reset=False), - UploadFileSet(["d"], ["hhh", "gij"], reset=False), - ], - ) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - AssignFloat(["e"], 5), - ], - ) - self.assertEqual( - result.errors, - [MetadataInconsistency("Cannot perform UploadFileSet operation on e: Attribute is not a File Set")], - ) - self.assertEqual(processor.processed_ops_count, len(operations)) - - def test_file_ops_delete(self): - # given - processor = OperationsPreprocessor() - - operations = [ - UploadFileSet(["b"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["hhh", "gij"], reset=False), - UploadFileSet(["c"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["qqq"], reset=False), - UploadFileSet(["d"], ["hhh", "gij"], reset=False), - DeleteAttribute(["a"]), - UploadFileSet(["a"], ["xx", "y", "abc"], reset=False), - UploadFileSet(["a"], ["hhh", "gij"], reset=False), - DeleteAttribute(["b"]), - ] - - # when - processor.process(operations) - - # then: there's a cutoff after DeleteAttribute(["a"]) - result = processor.get_operations() - self.assertEqual( - result.upload_operations, - [ - UploadFileSet(["b"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["abc", "defgh"], reset=True), - UploadFileSet(["c"], ["qqq"], reset=False), - UploadFileSet(["d"], ["hhh", "gij"], reset=False), - ], - ) - self.assertEqual(result.artifact_operations, []) - self.assertEqual( - result.other_operations, - [ - DeleteAttribute(["a"]), - ], - ) - self.assertEqual(result.errors, []) - self.assertEqual(processor.processed_ops_count, 6) - - def test_artifacts(self): - # given - processor = OperationsPreprocessor() - project_uuid = str(uuid.uuid4()) - - operations = [ - TrackFilesToArtifact(["a"], project_uuid, [("dir1/", None)]), - DeleteAttribute(["a"]), - TrackFilesToArtifact(["b"], project_uuid, [("dir1/", None)]), - TrackFilesToArtifact(["b"], project_uuid, [("dir2/dir3/", "dir2/")]), - TrackFilesToArtifact(["b"], project_uuid, [("dir4/dir5/", "dir4/")]), - AssignFloat(["c"], 5), - TrackFilesToArtifact(["c"], project_uuid, [("dir1/", None)]), - TrackFilesToArtifact(["d"], project_uuid, [("dir2/dir3/", "dir2/")]), - TrackFilesToArtifact(["d"], project_uuid, [("dir4/", None)]), - TrackFilesToArtifact(["e"], project_uuid, [("dir1/", None)]), - TrackFilesToArtifact(["e"], project_uuid, [("dir2/dir3/", "dir2/")]), - TrackFilesToArtifact(["f"], project_uuid, [("dir1/", None)]), - TrackFilesToArtifact(["f"], project_uuid, [("dir2/dir3/", "dir2/")]), - TrackFilesToArtifact(["f"], project_uuid, [("dir4/", None)]), - TrackFilesToArtifact(["a"], project_uuid, [("dir1/", None)]), - ] - - # when - processor.process(operations) - - # then: there's a cutoff before second TrackFilesToArtifact(["a"]) 
due to DeleteAttribute(["a"]) - result = processor.get_operations() - self.assertEqual(result.upload_operations, []) - self.assertEqual( - result.artifact_operations, - [ - TrackFilesToArtifact(["a"], project_uuid, [("dir1/", None)]), - TrackFilesToArtifact( - ["b"], - project_uuid, - [("dir1/", None), ("dir2/dir3/", "dir2/"), ("dir4/dir5/", "dir4/")], - ), - TrackFilesToArtifact(["d"], project_uuid, [("dir2/dir3/", "dir2/"), ("dir4/", None)]), - TrackFilesToArtifact(["e"], project_uuid, [("dir1/", None), ("dir2/dir3/", "dir2/")]), - TrackFilesToArtifact( - ["f"], - project_uuid, - [("dir1/", None), ("dir2/dir3/", "dir2/"), ("dir4/", None)], - ), - ], - ) - self.assertEqual( - result.other_operations, - [ - DeleteAttribute(["a"]), - AssignFloat(["c"], 5), - ], - ) - self.assertEqual( - result.errors, - [ - MetadataInconsistency( - "Cannot perform TrackFilesToArtifact operation on c: Attribute is not a Artifact" - ), - ], - ) - self.assertEqual(processor.processed_ops_count, len(operations) - 1) diff --git a/tests/unit/neptune/new/internal/backends/test_swagger_client_wrapper.py b/tests/unit/neptune/new/internal/backends/test_swagger_client_wrapper.py deleted file mode 100644 index 2cf848044..000000000 --- a/tests/unit/neptune/new/internal/backends/test_swagger_client_wrapper.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright (c) 2022, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import unittest -from unittest.mock import MagicMock - -from neptune.internal.backends.swagger_client_wrapper import SwaggerClientWrapper - - -class TestSwaggerClientWrapper(unittest.TestCase): - def setUp(self) -> None: - pass - - def test_api_callable_objects(self): - # given - swagger_client = MagicMock() - api = MagicMock() - api.callable_object = MagicMock() - api.callable_object.sub_property = 13 - swagger_client.api = api - - wrapper = SwaggerClientWrapper(swagger_client) - - # when - wrapper.api.method("arg1", kwarg="kwarg1") - wrapper.api.callable_object("arg2", kwarg="kwarg2") - - # then - api.method.assert_called_once_with("arg1", kwarg="kwarg1") - api.callable_object.assert_called_once_with("arg2", kwarg="kwarg2") - self.assertEqual(13, wrapper.api.callable_object.sub_property) diff --git a/tests/unit/neptune/new/internal/backends/test_utils.py b/tests/unit/neptune/new/internal/backends/test_utils.py deleted file mode 100644 index 0d97d9e8d..000000000 --- a/tests/unit/neptune/new/internal/backends/test_utils.py +++ /dev/null @@ -1,209 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import unittest -import uuid -from typing import Optional -from unittest.mock import ( - Mock, - patch, -) - -import pytest - -from neptune.attributes import ( - Integer, - String, -) -from neptune.exceptions import FetchAttributeNotFoundException -from neptune.internal import operation -from neptune.internal.backends.neptune_backend import NeptuneBackend -from neptune.internal.backends.utils import ( - ExecuteOperationsBatchingManager, - _check_if_tqdm_installed, - build_operation_url, - which_progress_bar, -) -from neptune.internal.container_type import ContainerType -from neptune.typing import ProgressBarCallback -from neptune.utils import ( - NullProgressBar, - TqdmProgressBar, -) - - -class CustomProgressBar(ProgressBarCallback): - def __enter__(self): ... - - def __exit__(self, exc_type, exc_val, exc_tb): ... - - def update(self, *, by: int, total: Optional[int] = None) -> None: - pass - - -class TestNeptuneBackendMock(unittest.TestCase): - def test_building_operation_url(self): - urls = { - build_operation_url("https://app.stage.neptune.ai", "api/leaderboard/v1/attributes/download"), - build_operation_url( - "https://app.stage.neptune.ai", - "/api/leaderboard/v1/attributes/download", - ), - build_operation_url( - "https://app.stage.neptune.ai/", - "api/leaderboard/v1/attributes/download", - ), - build_operation_url( - "https://app.stage.neptune.ai/", - "/api/leaderboard/v1/attributes/download", - ), - build_operation_url("app.stage.neptune.ai", "api/leaderboard/v1/attributes/download"), - build_operation_url("app.stage.neptune.ai", "/api/leaderboard/v1/attributes/download"), - build_operation_url("app.stage.neptune.ai/", "api/leaderboard/v1/attributes/download"), - build_operation_url("app.stage.neptune.ai/", "/api/leaderboard/v1/attributes/download"), - } - self.assertEqual( - {"https://app.stage.neptune.ai/api/leaderboard/v1/attributes/download"}, - urls, - ) - - -class TestExecuteOperationsBatchingManager(unittest.TestCase): - def test_cut_batch_on_copy(self): - backend = Mock(spec=NeptuneBackend) - manager = ExecuteOperationsBatchingManager(backend) - - operations = [ - operation.AssignInt(["a"], 12), - operation.AssignString(["b/c"], "test"), - operation.CopyAttribute(["a"], str(uuid.uuid4()), ContainerType.RUN, ["b"], Integer), - operation.AssignFloat(["q/d"], 44.12), - operation.AssignInt(["pp"], 12), - operation.CopyAttribute(["q/d"], str(uuid.uuid4()), ContainerType.RUN, ["b"], String), - ] - - batch = manager.get_batch(operations) - self.assertEqual(operations[0:2], batch.operations) - self.assertEqual([], batch.errors) - self.assertEqual(0, batch.dropped_operations_count) - - def test_get_nonempty_batch_with_copy_first(self): - backend = Mock(spec=NeptuneBackend) - manager = ExecuteOperationsBatchingManager(backend) - - operations = [ - operation.CopyAttribute(["a"], str(uuid.uuid4()), ContainerType.RUN, ["b"], Integer), - operation.AssignFloat(["q/d"], 44.12), - operation.AssignInt(["pp"], 12), - operation.CopyAttribute(["q/d"], str(uuid.uuid4()), ContainerType.RUN, ["b"], String), - ] - - batch = manager.get_batch(operations) - expected_batch = [ - operation.AssignInt(operations[0].path, backend.get_int_attribute.return_value.value) - ] + operations[1:3] - self.assertEqual(expected_batch, batch.operations) - self.assertEqual([], batch.errors) - self.assertEqual(0, batch.dropped_operations_count) - - def test_no_copies_is_ok(self): - backend = 
Mock(spec=NeptuneBackend) - manager = ExecuteOperationsBatchingManager(backend) - - operations = [ - operation.AssignInt(["a"], 12), - operation.AssignString(["b/c"], "test"), - operation.AssignFloat(["q/d"], 44.12), - operation.AssignInt(["pp"], 12), - ] - - batch = manager.get_batch(operations) - self.assertEqual(operations, batch.operations) - self.assertEqual([], batch.errors) - self.assertEqual(0, batch.dropped_operations_count) - - def test_no_ops_is_ok(self): - backend = Mock(spec=NeptuneBackend) - manager = ExecuteOperationsBatchingManager(backend) - - batch = manager.get_batch([]) - self.assertEqual([], batch.operations) - self.assertEqual([], batch.errors) - self.assertEqual(0, batch.dropped_operations_count) - - def test_subsequent_copies_is_ok(self): - backend = Mock(spec=NeptuneBackend) - manager = ExecuteOperationsBatchingManager(backend) - - operations = [ - operation.CopyAttribute(["a"], str(uuid.uuid4()), ContainerType.RUN, ["b"], Integer), - operation.CopyAttribute(["q/d"], str(uuid.uuid4()), ContainerType.RUN, ["b"], String), - operation.CopyAttribute(["pp"], str(uuid.uuid4()), ContainerType.RUN, ["b"], Integer), - ] - - batch = manager.get_batch(operations) - expected_batch = [operation.AssignInt(operations[0].path, backend.get_int_attribute.return_value.value)] - self.assertEqual(expected_batch, batch.operations) - self.assertEqual([], batch.errors) - self.assertEqual(0, batch.dropped_operations_count) - - def test_handle_failed_copy(self): - backend = Mock(spec=NeptuneBackend) - backend.get_int_attribute.side_effect = FetchAttributeNotFoundException("b") - manager = ExecuteOperationsBatchingManager(backend) - - operations = [ - operation.CopyAttribute(["q/d"], str(uuid.uuid4()), ContainerType.RUN, ["b"], Integer), - operation.AssignInt(["a"], 12), - operation.AssignString(["b/c"], "test"), - operation.AssignInt(["pp"], 12), - ] - - batch = manager.get_batch(operations) - # skipped erroneous CopyAttribute - self.assertEqual(operations[1:], batch.operations) - self.assertEqual([backend.get_int_attribute.side_effect], batch.errors) - self.assertEqual(1, batch.dropped_operations_count) - - -@patch("neptune.internal.backends.utils._check_if_tqdm_installed") -def test_which_progress_bar(mock_tqdm_installed): - mock_tqdm_installed.return_value = True - - assert which_progress_bar(None) == TqdmProgressBar - assert which_progress_bar(True) == TqdmProgressBar - assert which_progress_bar(False) == NullProgressBar - assert which_progress_bar(CustomProgressBar) == CustomProgressBar - - mock_tqdm_installed.return_value = False - assert which_progress_bar(None) == NullProgressBar - assert which_progress_bar(True) == NullProgressBar - assert which_progress_bar(False) == NullProgressBar - assert which_progress_bar(CustomProgressBar) == CustomProgressBar - - assert mock_tqdm_installed.call_count == 4 # 2 x 'None' + 2 x 'True' - - with pytest.raises(TypeError): - which_progress_bar(1) - - -@patch.dict("sys.modules", {"tqdm": None}) -def test_check_if_tqdm_installed_not_installed(): - assert not _check_if_tqdm_installed() - - -@patch.dict("sys.modules", {"tqdm": {}}) -def test_check_if_tqdm_installed_installed(): - assert _check_if_tqdm_installed() diff --git a/tests/unit/neptune/new/internal/operation_processors/__init__.py b/tests/unit/neptune/new/internal/operation_processors/__init__.py deleted file mode 100644 index 8d06af532..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. 
z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/internal/operation_processors/test_async_operation_processor.py b/tests/unit/neptune/new/internal/operation_processors/test_async_operation_processor.py deleted file mode 100644 index 4c41e5c65..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/test_async_operation_processor.py +++ /dev/null @@ -1,240 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pathlib import Path -from uuid import uuid4 - -from mock import ( - MagicMock, - patch, -) - -from neptune.constants import NEPTUNE_DATA_DIRECTORY -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.internal.operation_processors.async_operation_processor import AsyncOperationProcessor - - -@patch("neptune.internal.operation_processors.utils.random.choice") -@patch("neptune.internal.operation_processors.async_operation_processor.Path.mkdir") -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -@patch("neptune.internal.operation_processors.utils.os.getpid", return_value=42) -def test_setup(_, __, ___, ____, mkdir_mock, random_choice_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - random_choice_mock.side_effect = tuple("abcdefgh") - - # and - processor = AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - lock=MagicMock(), - queue=MagicMock(), - ) - - # then - mkdir_mock.assert_called_once_with(parents=True, exist_ok=True) - - # and - assert ( - processor.data_path - == Path(NEPTUNE_DATA_DIRECTORY) / "async" / f"{container_type.value}__{container_id}__42__abcdefgh" - ) - - -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -def test_flush(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = 
operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - - # and - processor = AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - lock=MagicMock(), - queue=MagicMock(), - ) - - # when - processor.flush() - - # then - disk_queue.flush.assert_called_once() - operation_storage.flush.assert_called_once() - metadata_file.flush.assert_called_once() - - -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -def test_close(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - - # and - processor = AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - lock=MagicMock(), - queue=MagicMock(), - ) - - # when - processor.close() - - # then - disk_queue.close.assert_called_once() - operation_storage.close.assert_called_once() - metadata_file.close.assert_called_once() - - -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -def test_stop_if_empty(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - disk_queue.is_empty.return_value = True - disk_queue.size.return_value = 0 - - # and - processor = AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - lock=MagicMock(), - queue=MagicMock(), - ) - - # and - processor.start() - - # when - processor.stop(seconds=1) - - # then - disk_queue.flush.assert_called() - operation_storage.flush.assert_called() - metadata_file.flush.assert_called() - - # and - disk_queue.close.assert_called() - operation_storage.close.assert_called() - metadata_file.close.assert_called() - disk_queue.is_empty.assert_called() - - # and - disk_queue.cleanup.assert_called() - operation_storage.cleanup.assert_called() - metadata_file.cleanup.assert_called() - - -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -def test_stop_if_not_empty(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - disk_queue.is_empty.return_value = False - disk_queue.size.return_value = 1 - - # and - processor = AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - 
lock=MagicMock(), - queue=MagicMock(), - ) - - # and - processor.start() - - # when - processor.stop(seconds=1) - - # then - disk_queue.flush.assert_called() - operation_storage.flush.assert_called() - metadata_file.flush.assert_called() - - # and - disk_queue.close.assert_called() - operation_storage.close.assert_called() - metadata_file.close.assert_called() - disk_queue.is_empty.assert_called() - - # and - disk_queue.cleanup.assert_not_called() - operation_storage.cleanup.assert_not_called() - metadata_file.cleanup.assert_not_called() - - -@patch("neptune.internal.operation_processors.async_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.async_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.async_operation_processor.MetadataFile") -def test_metadata(metadata_file_mock, _, __): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # when - AsyncOperationProcessor( - container_id=container_id, - container_type=container_type, - backend=MagicMock(), - lock=MagicMock(), - queue=MagicMock(), - ) - - # then - metadata = metadata_file_mock.call_args_list[0][1]["metadata"] - assert metadata["mode"] == "async" - assert metadata["containerType"] == ContainerType.RUN - assert metadata["containerId"] == container_id diff --git a/tests/unit/neptune/new/internal/operation_processors/test_lazy_operation_processor_wrapper.py b/tests/unit/neptune/new/internal/operation_processors/test_lazy_operation_processor_wrapper.py deleted file mode 100644 index 95bf0d01b..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/test_lazy_operation_processor_wrapper.py +++ /dev/null @@ -1,70 +0,0 @@ -import unittest.mock as mock - -from neptune.internal.operation_processors.lazy_operation_processor_wrapper import LazyOperationProcessorWrapper -from neptune.internal.operation_processors.operation_processor import OperationProcessor - - -def test_lazy_initialization(): - # given - operation_processor = mock.Mock(spec=OperationProcessor) - operation_processor_getter = mock.Mock(return_value=operation_processor) - - # when - lazy_wrapper = LazyOperationProcessorWrapper(operation_processor_getter) - - # then - operation_processor_getter.assert_not_called() - assert not lazy_wrapper.is_evaluated - - # when - lazy_wrapper.enqueue_operation(mock.Mock(), wait=False) - lazy_wrapper.enqueue_operation(mock.Mock(), wait=False) - - # then - operation_processor_getter.assert_called_once() - assert lazy_wrapper.is_evaluated - - -def test_call_propagation_to_wrapped(): - # given - operation_processor = mock.Mock(spec=OperationProcessor) - operation_processor_getter = mock.Mock(return_value=operation_processor) - lazy_wrapper = LazyOperationProcessorWrapper(operation_processor_getter) - - # when - arg_mock = mock.Mock() - lazy_wrapper.enqueue_operation(arg_mock, wait=True) - - # then - operation_processor.enqueue_operation.assert_called_once_with(arg_mock, wait=True) - - # when - with mock.patch.object( - LazyOperationProcessorWrapper, "operation_storage", new_callable=mock.PropertyMock - ) as operation_storage: - lazy_wrapper.operation_storage - - # then - operation_storage.assert_called_once() - - for method in ["pause", "resume", "flush", "wait", "stop", "close"]: - # when - getattr(lazy_wrapper, method)() - - # then - getattr(operation_processor, method).assert_called_once() - - -def test_op_processor_started_after_evaluation(): - - # given - operation_processor = mock.Mock(spec=OperationProcessor) - 
operation_processor_getter = mock.Mock(return_value=operation_processor) - lazy_wrapper = LazyOperationProcessorWrapper(operation_processor_getter) - - # when - lazy_wrapper.evaluate() - lazy_wrapper.evaluate() - - # then - operation_processor.start.assert_called_once() diff --git a/tests/unit/neptune/new/internal/operation_processors/test_offline_operation_processor.py b/tests/unit/neptune/new/internal/operation_processors/test_offline_operation_processor.py deleted file mode 100644 index f250021a0..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/test_offline_operation_processor.py +++ /dev/null @@ -1,166 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pathlib import Path -from uuid import uuid4 - -from mock import ( - MagicMock, - patch, -) - -from neptune.constants import NEPTUNE_DATA_DIRECTORY -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.internal.operation_processors.offline_operation_processor import OfflineOperationProcessor - - -@patch("neptune.internal.operation_processors.utils.random.choice") -@patch("neptune.internal.operation_processors.offline_operation_processor.Path.mkdir") -@patch("neptune.internal.operation_processors.offline_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.offline_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.offline_operation_processor.MetadataFile") -@patch("neptune.internal.operation_processors.utils.os.getpid", return_value=42) -def test_setup(_, __, ___, ____, mkdir_mock, random_choice_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - random_choice_mock.side_effect = tuple("abcdefgh") - - # and - processor = OfflineOperationProcessor(container_id=container_id, container_type=container_type, lock=MagicMock()) - - # then - mkdir_mock.assert_called_once_with(parents=True, exist_ok=True) - - # and - assert ( - processor.data_path - == Path(NEPTUNE_DATA_DIRECTORY) / "offline" / f"{container_type.value}__{container_id}__42__abcdefgh" - ) - - -@patch("neptune.internal.operation_processors.offline_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.offline_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.offline_operation_processor.MetadataFile") -def test_flush(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - - # and - processor = OfflineOperationProcessor(container_id=container_id, container_type=container_type, lock=MagicMock()) - - # and - processor.start() - - # when - processor.flush() - - # then - disk_queue.flush.assert_called_once() - 
operation_storage.flush.assert_called_once() - metadata_file.flush.assert_called_once() - - -@patch("neptune.internal.operation_processors.offline_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.offline_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.offline_operation_processor.MetadataFile") -def test_close(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - - # and - processor = OfflineOperationProcessor(container_id=container_id, container_type=container_type, lock=MagicMock()) - - # and - processor.start() - - # when - processor.close() - - # then - disk_queue.close.assert_called_once() - operation_storage.close.assert_called_once() - metadata_file.close.assert_called_once() - - -@patch("neptune.internal.operation_processors.offline_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.offline_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.offline_operation_processor.MetadataFile") -def test_stop(metadata_file_mock, operation_storage_mock, disk_queue_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - disk_queue = disk_queue_mock.return_value - - # and - processor = OfflineOperationProcessor(container_id=container_id, container_type=container_type, lock=MagicMock()) - - # and - processor.start() - - # when - processor.stop() - - # then - disk_queue.flush.assert_called_once() - operation_storage.flush.assert_called_once() - metadata_file.flush.assert_called_once() - - # and - disk_queue.close.assert_called_once() - operation_storage.close.assert_called_once() - metadata_file.close.assert_called_once() - - # and - operation_storage.cleanup.assert_not_called() - disk_queue.cleanup.assert_not_called() - metadata_file.cleanup.assert_not_called() - - -@patch("neptune.internal.operation_processors.offline_operation_processor.DiskQueue") -@patch("neptune.internal.operation_processors.offline_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.offline_operation_processor.MetadataFile") -def test_metadata(metadata_file_mock, _, __): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # when - OfflineOperationProcessor(container_id=container_id, container_type=container_type, lock=MagicMock()) - - # then - metadata = metadata_file_mock.call_args_list[0][1]["metadata"] - assert metadata["mode"] == "offline" - assert metadata["containerType"] == ContainerType.RUN - assert metadata["containerId"] == container_id diff --git a/tests/unit/neptune/new/internal/operation_processors/test_operation_logger.py b/tests/unit/neptune/new/internal/operation_processors/test_operation_logger.py deleted file mode 100644 index 192aab88d..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/test_operation_logger.py +++ /dev/null @@ -1,124 +0,0 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
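Aside: the LazyOperationProcessorWrapper tests a little above fully describe its contract — the factory is not invoked at construction, runs exactly once on first use, method calls are forwarded to the wrapped processor, and start() happens once even if evaluate() is called repeatedly. A minimal sketch of that contract under those assumptions (hypothetical class name, not the deleted implementation):

class LazyProcessorWrapper:
    def __init__(self, factory):
        self._factory = factory  # called lazily, never in __init__
        self._processor = None

    @property
    def is_evaluated(self):
        return self._processor is not None

    def evaluate(self):
        # Idempotent: the factory runs once and start() is called once.
        if self._processor is None:
            self._processor = self._factory()
            self._processor.start()
        return self._processor

    def enqueue_operation(self, op, *, wait):
        # First call triggers evaluation, then forwards to the real processor.
        self.evaluate().enqueue_operation(op, wait=wait)

    def flush(self):
        if self._processor is not None:
            self._processor.flush()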
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import unittest -from unittest.mock import Mock - -from neptune.internal.operation_processors.operation_logger import ( - CONNECTION_INTERRUPTED_MSG, - RECONNECT_FAILURE_MSG, - STILL_WAITING_MSG, - SUCCESS_MSG, - SYNC_FAILURE_MSG, - WAITING_FOR_OPERATIONS_MSG, - ProcessorStopLogger, - ProcessorStopSignal, - ProcessorStopSignalData, - ProcessorStopSignalType, -) - - -class TestOperationLoggerNoQueue(unittest.TestCase): - def setUp(self): - self.logger = ProcessorStopLogger(processor_id=0, signal_queue=None, logger=Mock()) - - def test_log_connection_interruption(self): - self.logger.log_connection_interruption(10) - - self.logger._logger.warning.assert_called_once_with(CONNECTION_INTERRUPTED_MSG, 10) - - def test_log_remaining_operations(self): - self.logger.log_remaining_operations(10) - - self.logger._logger.info.assert_called_once_with(WAITING_FOR_OPERATIONS_MSG, 10) - - self.logger._logger.warning.reset_mock() - - self.logger.log_remaining_operations(0) - - self.logger._logger.warning.assert_not_called() - - def test_log_success(self): - self.logger.log_success(10) - - self.logger._logger.info.assert_called_once_with(SUCCESS_MSG, 10) - - def test_log_sync_failure(self): - self.logger.log_sync_failure(10, 20) - - self.logger._logger.warning.assert_called_once_with(SYNC_FAILURE_MSG, 10, 20) - - def test_log_reconnect_failure(self): - self.logger.log_reconnect_failure(10, 20) - - self.logger._logger.warning.assert_called_once_with(RECONNECT_FAILURE_MSG, 10, 20) - - def test_log_still_waiting(self): - self.logger.log_still_waiting(10, 10, 20) - - self.logger._logger.info.assert_called_once_with(STILL_WAITING_MSG, 10, 20) - - -class TestOperationLoggerWithQueue(unittest.TestCase): - def setUp(self): - self.logger = ProcessorStopLogger(processor_id=0, signal_queue=Mock(), logger=Mock()) - - def test_log_connection_interruption(self): - self.logger.log_connection_interruption(10) - - self.logger._logger.warning.assert_not_called() - self.logger._signal_queue.put.assert_called_once_with( - ProcessorStopSignal( - data=ProcessorStopSignalData(max_reconnect_wait_time=10), - signal_type=ProcessorStopSignalType.CONNECTION_INTERRUPTED, - ) - ) - - def test_log_remaining_operations(self): - self.logger.log_remaining_operations(10) - - self.logger._logger.warning.assert_not_called() - self.logger._signal_queue.put.assert_called_once_with( - ProcessorStopSignal( - data=ProcessorStopSignalData(size_remaining=10), - signal_type=ProcessorStopSignalType.WAITING_FOR_OPERATIONS, - ) - ) - - def test_log_success(self): - self.logger.log_success(10) - - self.logger._logger.info.assert_not_called() - - def test_log_sync_failure(self): - self.logger.log_sync_failure(10, 20) - - self.logger._logger.warning.assert_not_called() - - def test_log_reconnect_failure(self): - self.logger.log_reconnect_failure(10, 20) - - self.logger._logger.warning.assert_not_called() - - def test_log_still_waiting(self): - self.logger.log_still_waiting(10, 10, 20) - - self.logger._logger.warning.assert_not_called() - self.logger._signal_queue.put.assert_called_once_with( - ProcessorStopSignal( - 
data=ProcessorStopSignalData(size_remaining=10, already_synced=10, already_synced_proc=20), - signal_type=ProcessorStopSignalType.STILL_WAITING, - ), - ) diff --git a/tests/unit/neptune/new/internal/operation_processors/test_sync_operation_processor.py b/tests/unit/neptune/new/internal/operation_processors/test_sync_operation_processor.py deleted file mode 100644 index 9aa795583..000000000 --- a/tests/unit/neptune/new/internal/operation_processors/test_sync_operation_processor.py +++ /dev/null @@ -1,157 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from pathlib import Path -from uuid import uuid4 - -from mock import ( - MagicMock, - patch, -) - -from neptune.constants import NEPTUNE_DATA_DIRECTORY -from neptune.internal.container_type import ContainerType -from neptune.internal.id_formats import UniqueId -from neptune.internal.operation_processors.sync_operation_processor import SyncOperationProcessor - - -@patch("neptune.internal.operation_processors.utils.random.choice") -@patch("neptune.internal.operation_processors.sync_operation_processor.Path.mkdir") -@patch("neptune.internal.operation_processors.sync_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.sync_operation_processor.MetadataFile") -@patch("neptune.internal.operation_processors.utils.os.getpid", return_value=42) -def test_setup(_, __, ___, mkdir_mock, random_choice_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - random_choice_mock.side_effect = tuple("abcdefgh") - - # and - processor = SyncOperationProcessor(container_id=container_id, container_type=container_type, backend=MagicMock()) - - # then - mkdir_mock.assert_called_once_with(parents=True, exist_ok=True) - - # and - assert ( - processor.data_path - == Path(NEPTUNE_DATA_DIRECTORY) / "sync" / f"{container_type.value}__{container_id}__42__abcdefgh" - ) - - -@patch("neptune.internal.operation_processors.sync_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.sync_operation_processor.MetadataFile") -def test_flush(metadata_file_mock, operation_storage_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - - # and - processor = SyncOperationProcessor(container_id=container_id, container_type=container_type, backend=MagicMock()) - - # and - processor.start() - - # when - processor.flush() - - # then - metadata_file.flush.assert_called_once() - operation_storage.flush.assert_called_once() - - -@patch("neptune.internal.operation_processors.sync_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.sync_operation_processor.MetadataFile") -def test_close(metadata_file_mock, operation_storage_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - 
metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - - # and - processor = SyncOperationProcessor(container_id=container_id, container_type=container_type, backend=MagicMock()) - - # and - processor.start() - - # when - processor.close() - - # then - metadata_file.close.assert_called_once() - operation_storage.close.assert_called_once() - - -@patch("neptune.internal.operation_processors.sync_operation_processor.Path.rmdir") -@patch("neptune.internal.operation_processors.sync_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.sync_operation_processor.MetadataFile") -def test_stop(metadata_file_mock, operation_storage_mock, rmdir_mock): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # and - metadata_file = metadata_file_mock.return_value - operation_storage = operation_storage_mock.return_value - - # and - processor = SyncOperationProcessor(container_id=container_id, container_type=container_type, backend=MagicMock()) - - # and - processor.start() - - # when - processor.stop() - - # then - metadata_file.flush.assert_called_once() - operation_storage.flush.assert_called_once() - - # and - metadata_file.close.assert_called_once() - operation_storage.close.assert_called_once() - - # and - operation_storage.cleanup.assert_called() - metadata_file.cleanup.assert_called() - - # and - rmdir_mock.assert_called_once() - - -@patch("neptune.internal.operation_processors.sync_operation_processor.OperationStorage") -@patch("neptune.internal.operation_processors.sync_operation_processor.MetadataFile") -def test_metadata(metadata_file_mock, _): - # given - container_id = UniqueId(str(uuid4())) - container_type = ContainerType.RUN - - # when - SyncOperationProcessor(container_id=container_id, container_type=container_type, backend=MagicMock()) - - # then - metadata = metadata_file_mock.call_args_list[0][1]["metadata"] - assert metadata["mode"] == "sync" - assert metadata["containerType"] == ContainerType.RUN - assert metadata["containerId"] == container_id diff --git a/tests/unit/neptune/new/internal/signals_processing/__init__.py b/tests/unit/neptune/new/internal/signals_processing/__init__.py deleted file mode 100644 index 8d06af532..000000000 --- a/tests/unit/neptune/new/internal/signals_processing/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/internal/signals_processing/test_signals_processor.py b/tests/unit/neptune/new/internal/signals_processing/test_signals_processor.py deleted file mode 100644 index dedfe899c..000000000 --- a/tests/unit/neptune/new/internal/signals_processing/test_signals_processor.py +++ /dev/null @@ -1,546 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
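Aside: the three test_setup variants (async, offline, sync) all assert the same on-disk layout for processor data, differing only in the mode segment. A sketch of that naming scheme — assuming NEPTUNE_DATA_DIRECTORY resolves to ".neptune" and an 8-character random suffix, which is what the mocked getpid/random.choice values imply:

import os
import random
import string
from pathlib import Path

NEPTUNE_DATA_DIRECTORY = ".neptune"  # assumed value of the deleted constant

def data_path(mode: str, container_type: str, container_id: str) -> Path:
    """.neptune/<mode>/<container_type>__<container_id>__<pid>__<8 random chars>"""
    suffix = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
    return Path(NEPTUNE_DATA_DIRECTORY) / mode / (
        f"{container_type}__{container_id}__{os.getpid()}__{suffix}"
    )

# With getpid mocked to 42 and random.choice yielding "abcdefgh", this gives
# e.g. .neptune/async/run__<uuid>__42__abcdefgh, matching the assertions above.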
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from queue import Queue - -from mock import ( - MagicMock, - call, - patch, -) - -from neptune.internal.signals_processing.signals_processor import SignalsProcessor -from neptune.internal.signals_processing.utils import ( - signal_batch_lag, - signal_batch_processed, - signal_batch_started, -) - - -def test__no_progress__no_signal(): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - queue = Queue() - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_not_called() - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__proper_execution_of_batch(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [ - 5.0, - ] - - # and - queue = Queue() - # First proper batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=3.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_not_called() - monotonic.assert_has_calls(calls=(call(),), any_order=True) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__too_long_batch_execution(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [11.0, 11.01] - - # and - queue = Queue() - # First too long batch - signal_batch_started(queue=queue, occured_at=1.0) - signal_batch_processed(queue=queue, occured_at=9.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_called_once_with(container) - monotonic.assert_has_calls(calls=(call(), call()), any_order=True) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__proper_then_too_long(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [16.0, 16.01] - - # and - queue = Queue() - # First proper batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=4.0) - # Second too long batch - signal_batch_started(queue=queue, occured_at=5.0) - signal_batch_processed(queue=queue, occured_at=15.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, 
- async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_called_once_with(container) - monotonic.assert_has_calls(calls=(call(), call()), any_order=True) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__proper_then_non_ended(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [16.0, 16.01] - - # and - queue = Queue() - # First proper batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=4.0) - # Second non-ended batch - signal_batch_started(queue=queue, occured_at=5.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_called_once_with(container) - monotonic.assert_has_calls(calls=(call(), call()), any_order=True) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__too_short_time_between_callbacks(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [14.0, 14.01] - - # and - queue = Queue() - # First failing batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=6.0) - # Almost immediate second failing batch - signal_batch_started(queue=queue, occured_at=7.0) - signal_batch_processed(queue=queue, occured_at=13.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_called_once_with(container) - monotonic.assert_has_calls(calls=(call(), call()), any_order=True) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__ack_in_between(monotonic): - # given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [17.0, 17.01] - - # and - queue = Queue() - # First failing batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=6.0) - # Proper batch - signal_batch_started(queue=queue, occured_at=7.0) - signal_batch_processed(queue=queue, occured_at=9.0) - # Second failing batch - signal_batch_started(queue=queue, occured_at=10.0) - signal_batch_processed(queue=queue, occured_at=16.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - monotonic.assert_has_calls(calls=(call(), call()), any_order=True) - async_no_progress_callback.assert_called_once_with(container) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__no_progress__proper_then_too_long_different_cycles(monotonic): - # 
given - container = MagicMock() - async_no_progress_callback = MagicMock() - - # and - monotonic.side_effect = [5.0, 5.01, 16.0, 16.01] - - # and - queue = Queue() - # First proper batch - signal_batch_started(queue=queue, occured_at=0.0) - signal_batch_processed(queue=queue, occured_at=4.0) - # Second too long batch - signal_batch_started(queue=queue, occured_at=5.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=5.0, - async_no_progress_callback=async_no_progress_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_no_progress_callback.assert_not_called() - - # given - signal_batch_processed(queue=queue, occured_at=15.0) - - # when - processor.work() - - # then - async_no_progress_callback.assert_called_once_with(container) - - # and - monotonic.assert_has_calls(calls=(call(), call(), call())) - async_no_progress_callback.assert_has_calls(calls=(call(container),), any_order=True) - - -def test__lag__no_signal(): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - queue = Queue() - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_lag_callback.assert_not_called() - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__lag__proper_execution_of_batch(monotonic): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - monotonic.side_effect = [ - 5.0, - ] - - # and - queue = Queue() - signal_batch_lag(queue=queue, lag=0.1, occured_at=1.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_lag_callback.assert_not_called() - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__lag__too_big_lag(monotonic): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - monotonic.side_effect = [ - 7.0, - 7.01, - ] - - # and - queue = Queue() - signal_batch_lag(queue=queue, lag=5.0, occured_at=6.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_lag_callback.assert_called_once_with(container) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__lag__too_short_interval(monotonic): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - monotonic.side_effect = [7.0, 7.01, 11.0, 11.01] - - # and - queue = Queue() - signal_batch_lag(queue=queue, lag=5.0, occured_at=6.0) - signal_batch_lag(queue=queue, lag=3.0, occured_at=10.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - 
async_lag_callback.assert_called_once_with(container) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__lag__longer_interval(monotonic): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - monotonic.side_effect = [7.0, 20.0, 20.01] - - # and - queue = Queue() - signal_batch_lag(queue=queue, lag=5.0, occured_at=6.0) - signal_batch_lag(queue=queue, lag=3.0, occured_at=19.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_lag_callback.assert_has_calls( - calls=( - call(container), - call(container), - ), - any_order=True, - ) - - -@patch("neptune.internal.signals_processing.signals_processor.monotonic") -def test__lag__longer_interval_different_cycles(monotonic): - # given - container = MagicMock() - async_lag_callback = MagicMock() - - # and - monotonic.side_effect = [7.0, 7.01, 20.0, 20.01] - - # and - queue = Queue() - signal_batch_lag(queue=queue, lag=5.0, occured_at=6.0) - - # and - processor = SignalsProcessor( - period=10, - container=container, - queue=queue, - async_lag_threshold=1.0, - async_no_progress_threshold=1.0, - async_lag_callback=async_lag_callback, - callbacks_interval=5, - in_async=False, - ) - - # when - processor.work() - - # then - async_lag_callback.assert_called_with(container) - - # given - signal_batch_lag(queue=queue, lag=3.0, occured_at=19.0) - - # when - processor.work() - - # then - async_lag_callback.assert_has_calls( - calls=( - call(container), - call(container), - ), - any_order=True, - ) diff --git a/tests/unit/neptune/new/internal/test_container_structure.py b/tests/unit/neptune/new/internal/test_container_structure.py deleted file mode 100644 index 537169819..000000000 --- a/tests/unit/neptune/new/internal/test_container_structure.py +++ /dev/null @@ -1,162 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
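Aside: taken together, the SignalsProcessor cases above encode two rules — a batch counts as "no progress" when its processing time exceeds async_no_progress_threshold, and a callback fires at most once per callbacks_interval seconds of monotonic time. A sketch of just that detection rule (the real processor also drains a signal queue and handles lag callbacks; this is a simplification):

from time import monotonic

class NoProgressDetector:
    def __init__(self, threshold: float, interval: float, callback):
        self._threshold = threshold  # max tolerated batch duration, seconds
        self._interval = interval    # min spacing between callback firings
        self._callback = callback
        self._last_fired = None

    def batch_finished(self, started_at: float, processed_at: float) -> None:
        if processed_at - started_at <= self._threshold:
            return  # batch was fast enough; nothing to report
        now = monotonic()
        if self._last_fired is None or now - self._last_fired >= self._interval:
            self._last_fired = now
            self._callback()  # fires at most once per interval, as tested above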
-# -import unittest -import uuid - -from neptune.exceptions import MetadataInconsistency -from neptune.internal.backends.neptune_backend_mock import NeptuneBackendMock -from neptune.internal.container_structure import ContainerStructure -from neptune.internal.container_type import ContainerType -from neptune.types.value import Value - - -class TestRunStructure(unittest.TestCase): - def test_get_none(self): - exp = ContainerStructure[int, dict]() - self.assertEqual(exp.get(["some", "path", "val"]), None) - - def test_get_nested_variable_fails(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - with self.assertRaises(MetadataInconsistency): - exp.get(["some", "path", "val", "nested"]) - with self.assertRaises(MetadataInconsistency): - exp.get(["some", "path", "val", "nested", "nested"]) - - def test_get_ns(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - self.assertEqual(exp.get(["some", "path"]), {"val": 3}) - - def test_set(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - self.assertEqual(exp.get(["some", "path", "val"]), 3) - - def test_set_nested_variable_fails(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - with self.assertRaises(MetadataInconsistency): - exp.set(["some", "path", "val", "nested"], 3) - with self.assertRaises(MetadataInconsistency): - exp.set(["some", "path", "val", "nested", "nested"], 3) - - def test_set_ns_collision(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - with self.assertRaises(MetadataInconsistency): - exp.set(["some", "path"], 5) - - def test_pop(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val1"], 3) - exp.set(["some", "path", "val2"], 5) - exp.pop(["some", "path", "val2"]) - self.assertEqual(exp.get(["some", "path", "val1"]), 3) - self.assertEqual(exp.get(["some", "path", "val2"]), None) - self.assertTrue("some" in exp.get_structure() and "path" in exp.get_structure()["some"]) - - def test_pop_whole_ns(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val"], 3) - exp.pop(["some", "path", "val"]) - self.assertEqual(exp.get(["some", "path", "val"]), None) - self.assertFalse("some" in exp.get_structure()) - - def test_pop_not_found(self): - exp = ContainerStructure[int, dict]() - with self.assertRaises(MetadataInconsistency): - exp.pop(["some", "path"]) - - def test_pop_ns_fail(self): - exp = ContainerStructure[int, dict]() - exp.set(["some", "path", "val1"], 3) - with self.assertRaises(MetadataInconsistency): - exp.pop(["some", "path"]) - - -class TestIterateSubpaths(unittest.TestCase): - project_uuid = str(uuid.uuid4()) - - def setUp(self): - self.backend = NeptuneBackendMock() - exp = self.backend.create_run(self.project_uuid) - # FIXME test for projects - self.structure = self.backend._containers[(exp.id, ContainerType.RUN)] - self.structure.set(["attributes", "float"], Value()) - self.structure.set(["attributes", "node", "one"], Value()) - self.structure.set(["attributes", "node", "two"], Value()) - self.structure.set(["attributes", "node", "three"], Value()) - self.structure.set(["attributes", "int"], Value()) - self.structure.set(["attributes", "string"], Value()) - - def test_iterate_empty_run(self): - empty_structure = ContainerStructure[Value, dict]() - - self.assertListEqual(list(empty_structure.iterate_subpaths([])), []) - self.assertListEqual(list(empty_structure.iterate_subpaths(["test"])), []) - - def 
test_iterate_empty_prefix(self): - prefix = [] - expected_subpaths = [ - "sys/id", - "sys/state", - "sys/owner", - "sys/size", - "sys/tags", - "sys/creation_time", - "sys/modification_time", - "sys/failed", - "attributes/float", - "attributes/int", - "attributes/string", - "attributes/node/one", - "attributes/node/two", - "attributes/node/three", - ] - - self.assertListEqual(list(self.structure.iterate_subpaths(prefix)), expected_subpaths) - - def test_iterate_prefix(self): - prefix = ["sys"] - expected_subpaths = [ - "sys/id", - "sys/state", - "sys/owner", - "sys/size", - "sys/tags", - "sys/creation_time", - "sys/modification_time", - "sys/failed", - ] - - self.assertListEqual(list(self.structure.iterate_subpaths(prefix)), expected_subpaths) - - def test_iterate_long_prefix(self): - prefix = ["attributes", "node"] - expected_subpaths = [ - "attributes/node/one", - "attributes/node/two", - "attributes/node/three", - ] - - self.assertListEqual(list(self.structure.iterate_subpaths(prefix)), expected_subpaths) - - def test_iterate_nonexistent_prefix(self): - prefix = ["argh"] - expected_subpaths = [] - - self.assertListEqual(list(self.structure.iterate_subpaths(prefix)), expected_subpaths) diff --git a/tests/unit/neptune/new/internal/test_credentials.py b/tests/unit/neptune/new/internal/test_credentials.py deleted file mode 100644 index 54213531e..000000000 --- a/tests/unit/neptune/new/internal/test_credentials.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
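Aside: TestRunStructure above fixes the semantics of the nested attribute tree — a leaf value and a namespace may never shadow one another, missing paths read as None, and conflicting reads or writes raise MetadataInconsistency. A sketch of those get/set rules over a plain nested dict (simplified; the deleted class also supports pop with empty-namespace pruning and subpath iteration):

class MetadataInconsistency(Exception):
    pass

class Structure:
    def __init__(self):
        self._root = {}

    def get(self, path):
        node = self._root
        for part in path:
            if not isinstance(node, dict):
                raise MetadataInconsistency("path goes through a leaf value")
            if part not in node:
                return None
            node = node[part]
        return node

    def set(self, path, value):
        node = self._root
        for part in path[:-1]:
            child = node.setdefault(part, {})
            if not isinstance(child, dict):
                raise MetadataInconsistency("cannot nest under a leaf value")
            node = child
        if isinstance(node.get(path[-1]), dict):
            raise MetadataInconsistency("cannot overwrite a namespace with a value")
        node[path[-1]] = value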
-# - -import os -import unittest - -from neptune.envs import API_TOKEN_ENV_NAME -from neptune.internal.credentials import Credentials -from neptune.internal.exceptions import NeptuneInvalidApiTokenException - -API_TOKEN = ( - "eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vYXBwLnN0YWdlLm5lcHR1bmUubWwiLCJ" - "hcGlfa2V5IjoiOTJhNzhiOWQtZTc3Ni00ODlhLWI5YzEtNzRkYmI1ZGVkMzAyIn0=" -) - - -class TestCredentials(unittest.TestCase): - def test_should_take_default_credentials_from_env(self): - # given - os.environ[API_TOKEN_ENV_NAME] = API_TOKEN - - # when - credentials = Credentials.from_token() - - # then - self.assertEqual(API_TOKEN, credentials.api_token) - - def test_should_replace_token_from_env(self): - # given - os.environ[API_TOKEN_ENV_NAME] = "INVALID_TOKEN" - - # when - credentials = Credentials.from_token(API_TOKEN) - - # then - self.assertEqual(API_TOKEN, credentials.api_token) - - def test_raise_invalid_token(self): - # expect - with self.assertRaises(NeptuneInvalidApiTokenException): - Credentials.from_token("INVALID_TOKEN") - - def test_raise_invalid_token_from_env(self): - # given - os.environ[API_TOKEN_ENV_NAME] = "INVALID_TOKEN" - - # expect - with self.assertRaises(NeptuneInvalidApiTokenException): - Credentials.from_token() diff --git a/tests/unit/neptune/new/internal/test_operations.py b/tests/unit/neptune/new/internal/test_operations.py deleted file mode 100644 index c93ed8013..000000000 --- a/tests/unit/neptune/new/internal/test_operations.py +++ /dev/null @@ -1,154 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
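Aside: the credentials tests above only pin down observable behaviour — a well-formed token is accepted from an argument or from the API_TOKEN_ENV_NAME variable, and "INVALID_TOKEN" raises. The token literal itself is base64-encoded JSON, so the validation plausibly looks something like the following sketch (an assumption; the deleted Credentials class certainly did more than this):

import base64
import json

class InvalidApiToken(Exception):  # stand-in for NeptuneInvalidApiTokenException
    pass

def parse_api_token(token: str) -> dict:
    try:
        data = json.loads(base64.b64decode(token.encode()).decode())
    except Exception as exc:
        raise InvalidApiToken("token is not base64-encoded JSON") from exc
    if "api_address" not in data:
        raise InvalidApiToken("token is missing 'api_address'")
    return data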
-# -import inspect -import json -import unittest -import uuid - -from neptune.attributes import Integer -from neptune.internal.operation import ( - AddStrings, - AssignArtifact, - AssignBool, - AssignDatetime, - AssignFloat, - AssignInt, - AssignString, - ClearArtifact, - ClearFloatLog, - ClearImageLog, - ClearStringLog, - ClearStringSet, - ConfigFloatSeries, - ContainerType, - CopyAttribute, - DeleteAttribute, - DeleteFiles, - ImageValue, - LogFloats, - LogImages, - LogStrings, - Operation, - RemoveStrings, - TrackFilesToArtifact, - UploadFile, - UploadFileContent, - UploadFileSet, - all_subclasses, - datetime, -) - - -class TestOperations(unittest.TestCase): - def test_serialization_to_dict(self): - classes = {cls for cls in all_subclasses(Operation)} - # drop abstract classes - for cls in classes.copy(): - if inspect.isabstract(cls): - classes.remove(cls) - - # test every Operation subclass and drop from `classes` - for obj in self._list_objects(): - if obj.__class__ in classes: - classes.remove(obj.__class__) - deserialized_obj = Operation.from_dict(json.loads(json.dumps(obj.to_dict()))) - self.assertEqual(obj.__dict__, deserialized_obj.__dict__) - - # expect no Operation subclass left - self.assertEqual(classes, set()) - - @staticmethod - def _list_objects(): - now = datetime.now() - return [ - AssignFloat(TestOperations._random_path(), 5.0), - AssignInt(TestOperations._random_path(), 5), - AssignBool(TestOperations._random_path(), True), - AssignBool(TestOperations._random_path(), False), - AssignString(TestOperations._random_path(), "a\rsdf\thr"), - AssignDatetime( - TestOperations._random_path(), - now.replace(microsecond=1000 * int(now.microsecond / 1000)), - ), - AssignArtifact( - TestOperations._random_path(), - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - ), - UploadFile(TestOperations._random_path(), "txt", file_path="file/path/f.txt"), - UploadFile(TestOperations._random_path(), "txt", file_path="file/path/f.txt", clean_after_upload=True), - UploadFile(TestOperations._random_path(), "txt", tmp_file_name="f.txt"), - UploadFileContent(TestOperations._random_path(), "stream.txt", "some base64"), - UploadFileSet( - TestOperations._random_path(), - ["file/path/*.txt", "another/file/path/*.txt"], - True, - ), - UploadFileSet( - TestOperations._random_path(), - ["file/path/*.txt", "another/file/path/*.txt"], - False, - ), - DeleteFiles(TestOperations._random_path(), {"file/path/*.txt", "dir/path/"}), - LogFloats( - TestOperations._random_path(), - [ - LogFloats.ValueType(5, 4, 500), - LogFloats.ValueType(3, None, 1000), - LogFloats.ValueType(10, 10, 1234), - ], - ), - LogStrings( - TestOperations._random_path(), - [ - LogStrings.ValueType("jetybv", 1, 5), - LogStrings.ValueType("ghs\ner", 3, 123), - LogStrings.ValueType("r", None, 1356), - LogStrings.ValueType("ghsr", 13, 53682), - ], - ), - LogImages( - TestOperations._random_path(), - [ - LogImages.ValueType(ImageValue("base64_image_1", "name1", "description1"), None, 2), - LogImages.ValueType(ImageValue("base64_image_2", "name2", "description2"), 0, 5), - ], - ), - ClearFloatLog(TestOperations._random_path()), - ClearStringLog(TestOperations._random_path()), - ClearImageLog(TestOperations._random_path()), - ConfigFloatSeries(TestOperations._random_path(), min=11, max=600, unit="kg/h"), - AddStrings(TestOperations._random_path(), {"asef", "asrge4"}), - RemoveStrings(TestOperations._random_path(), {"a\ne", "aeg\t4ger", "agrg"}), - ClearStringSet(TestOperations._random_path()), - 
DeleteAttribute(TestOperations._random_path()), - TrackFilesToArtifact( - TestOperations._random_path(), - str(uuid.uuid4()), - [("file/path/f.txt", None)], - ), - ClearArtifact(TestOperations._random_path()), - CopyAttribute( - TestOperations._random_path(), - container_id=str(uuid.uuid4()), - container_type=ContainerType.RUN, - source_path=TestOperations._random_path(), - source_attr_cls=Integer, - ), - ] - - @staticmethod - def _random_path(): - return ["some", "random", "path", str(uuid.uuid4())] diff --git a/tests/unit/neptune/new/internal/test_streams.py b/tests/unit/neptune/new/internal/test_streams.py deleted file mode 100644 index 883a299e8..000000000 --- a/tests/unit/neptune/new/internal/test_streams.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Copyright (c) 2021, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import sys -import threading -import unittest -from contextlib import redirect_stdout -from io import StringIO -from unittest.mock import MagicMock - -from neptune.internal.streams.std_stream_capture_logger import ( - StdoutCaptureLogger, - StdStreamCaptureLogger, -) - - -class TestStdStreamCaptureLogger(unittest.TestCase): - def test_catches_stdout(self): - stdout = StringIO() - with redirect_stdout(stdout): - mock_run = MagicMock() - attr_name = "sys/stdout" - logger = StdoutCaptureLogger(mock_run, attr_name) - stdout_fp = sys.stdout - print("testing", file=stdout_fp) - logger.close() - - self.assertListEqual( - mock_run[attr_name].append.call_args_list, - [ - (("testing",), {}), - (("\n",), {}), - ], - ) - stdout.seek(0) - self.assertEqual(stdout.read(), "testing\n") - - def test_does_not_report_if_used_after_stop(self): - stdout = StringIO() - with redirect_stdout(stdout): - mock_run = MagicMock() - attr_name = "sys/stdout" - logger = StdoutCaptureLogger(mock_run, attr_name) - stdout_fp = sys.stdout - logger.close() - - print("testing", file=stdout_fp) - mock_run[attr_name].log.assert_not_called() - stdout.seek(0) - self.assertEqual(stdout.read(), "testing\n") - - def test_logger_with_lock_does_not_cause_deadlock(self): - stream = StringIO() - mock_run = MagicMock() - attr_name = "sys/stdout" - - logger = StdStreamCaptureLogger(mock_run, attr_name, stream) - done_waiting = threading.Event() - - # The logger is blocked in background, the main thread is still awake - logger.write("testing") - self.assertListEqual( - mock_run[attr_name].log.call_args_list, - [], - ) - - done_waiting.set() - logger.close() - self.assertListEqual( - mock_run[attr_name].append.call_args_list, - [ - (("testing",), {}), - ], - ) - stream.seek(0) - self.assertEqual(stream.read(), "testing") diff --git a/tests/unit/neptune/new/internal/utils/__init__.py b/tests/unit/neptune/new/internal/utils/__init__.py deleted file mode 100644 index 63b30720b..000000000 --- a/tests/unit/neptune/new/internal/utils/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/neptune/new/internal/utils/test_dependency_tracking.py b/tests/unit/neptune/new/internal/utils/test_dependency_tracking.py deleted file mode 100644 index 8e666b577..000000000 --- a/tests/unit/neptune/new/internal/utils/test_dependency_tracking.py +++ /dev/null @@ -1,54 +0,0 @@ -import itertools -from unittest.mock import ( - MagicMock, - patch, -) - -import pytest - -from neptune.internal.utils.dependency_tracking import ( - FileDependenciesStrategy, - InferDependenciesStrategy, -) - - -class TestDependencyTracking: - @patch("neptune.internal.utils.dependency_tracking.distributions") - @patch("neptune.types.File.from_content") - def test_infer_calls_upload_correctly(self, mock_from_content, mock_distributions): - single_dist = MagicMock() - single_dist.metadata = {"Name": "some_dependency", "Version": "1.0.0"} - mock_distributions.return_value = itertools.chain([single_dist]) - InferDependenciesStrategy().log_dependencies(run=MagicMock()) - - mock_distributions.assert_called_once() - mock_from_content.assert_called_once_with("some_dependency==1.0.0") - - @patch("neptune.internal.utils.dependency_tracking.distributions", return_value=[]) - @patch("neptune.types.File.from_content") - def test_infer_does_not_upload_empty_dependency_string(self, mock_from_content, mock_distributions): - InferDependenciesStrategy().log_dependencies(run=MagicMock()) - - mock_distributions.assert_called_once() - mock_from_content.assert_not_called() - - @patch("neptune.handler.Handler.upload") - @patch("neptune.internal.utils.dependency_tracking.logger") - def test_file_strategy_path_incorrect(self, mock_logger, mock_upload): - FileDependenciesStrategy(path="non-existent_file_path.txt").log_dependencies(run=MagicMock()) - - mock_upload.assert_not_called() - mock_logger.error.assert_called_once() - - @pytest.mark.parametrize("path", ["valid_file_path.txt", "dir/valid_file_path.txt"]) - @patch("os.path.isfile", return_value=True) - def test_file_strategy_uploads_correct_path(self, mock_is_file, path): - run = MagicMock() - handler = MagicMock() - run.__getitem__ = MagicMock() - run.__getitem__.return_value = handler - handler.upload = MagicMock() - - FileDependenciesStrategy(path=path).log_dependencies(run=run) - - handler.upload.assert_called_once_with(path) diff --git a/tests/unit/neptune/new/internal/utils/test_deprecation.py b/tests/unit/neptune/new/internal/utils/test_deprecation.py deleted file mode 100644 index dbb8259a7..000000000 --- a/tests/unit/neptune/new/internal/utils/test_deprecation.py +++ /dev/null @@ -1,98 +0,0 @@ -# -# Copyright (c) 2020, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from unittest.mock import ( - ANY, - patch, -) - -import pytest - -from neptune.exceptions import NeptuneParametersCollision -from neptune.internal.utils.deprecation import ( - deprecated, - deprecated_parameter, -) -from neptune.internal.warnings import ( - NeptuneDeprecationWarning, - warn_once, -) - - -@deprecated_parameter(deprecated_kwarg_name="deprecated_param", required_kwarg_name="new_param") -def fun_with_deprecated_param(*, new_param): - return new_param - - -@deprecated() -def deprecated_func(): - pass - - -@deprecated(alternative="non_deprecated_func") -def deprecated_func_with_alternative(): - pass - - -class TestDeprecatedParameter: - def test_deprecated_not_used(self): - # https://stackoverflow.com/questions/45671803/how-to-use-pytest-to-assert-no-warning-is-raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - fun_with_deprecated_param(new_param=42) - - def test_deprecated_replaced(self): - with pytest.deprecated_call( - match="Parameter `deprecated_param` is deprecated, use `new_param` instead. We'll end support of it in " - "next major release." - ): - value = fun_with_deprecated_param(deprecated_param=42) - assert value == 42 - - def test_conflict(self): - with pytest.raises(NeptuneParametersCollision): - fun_with_deprecated_param(new_param=42, deprecated_param=42) - - def test_passing_deprecated_parameter_as_none(self): - assert fun_with_deprecated_param(deprecated_param=None) is None - - with pytest.raises(NeptuneParametersCollision): - value = fun_with_deprecated_param(new_param=None, deprecated_param=None) - assert value is None - - def test_deprecated_func_without_alternative(self): - with pytest.deprecated_call( - match="`deprecated_func` is deprecated and will be removed. We'll end support of " - "it in next major release." - ): - deprecated_func() - - def test_deprecated_func_with_alternative(self): - with pytest.deprecated_call( - match="`deprecated_func_with_alternative` is deprecated, " - "use `non_deprecated_func` instead. We'll end support of it in " - "next major release." - ): - deprecated_func_with_alternative() - - @patch("warnings.warn") - def test_warn_once(self, warn): - warn_once(message="Deprecation message 1") - warn_once(message="Deprecation message 1") - - warn.assert_called_once_with( - message="Deprecation message 1", category=NeptuneDeprecationWarning, stacklevel=ANY - ) diff --git a/tests/unit/neptune/new/internal/utils/test_disk_utilization.py b/tests/unit/neptune/new/internal/utils/test_disk_utilization.py deleted file mode 100644 index 035d27086..000000000 --- a/tests/unit/neptune/new/internal/utils/test_disk_utilization.py +++ /dev/null @@ -1,205 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import unittest
-import warnings
-from io import UnsupportedOperation
-
-import pytest
-from mock import (
-    MagicMock,
-    patch,
-)
-from psutil import (
-    AccessDenied,
-    Error,
-)
-
-from neptune.envs import (
-    NEPTUNE_MAX_DISK_USAGE,
-    NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED,
-)
-from neptune.exceptions import NeptuneMaxDiskUtilizationExceeded
-from neptune.internal.utils.disk_utilization import (
-    NonRaisingErrorHandler,
-    RaisingErrorHandler,
-    ensure_disk_not_overutilize,
-)
-
-
-class TestDiskUtilization(unittest.TestCase):
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    def test_handle_invalid_env_values(self):
-        for value in ["True", "101", "-1"]:
-            with patch.dict(os.environ, {NEPTUNE_MAX_DISK_USAGE: value}, clear=True):
-                mocked_func = MagicMock()
-                with warnings.catch_warnings(record=True) as warns:
-                    wrapped_func = ensure_disk_not_overutilize(mocked_func)
-                    wrapped_func()
-
-                assert len(warns) == 1
-                assert f"invalid value of '{NEPTUNE_MAX_DISK_USAGE}': '{value}" in str(warns[-1].message)
-                mocked_func.assert_called_once()
-
-    # Catch OSError, the base class of all OS and IO errors since PEP 3151
-    # (https://peps.python.org/pep-3151), and additionally psutil's own base error,
-    # psutil.Error (https://psutil.readthedocs.io/en/latest/index.html#psutil.Error).
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "False"})
-    def test_suppressing_of_func_errors(self):
-        disk_errors = [
-            OSError(),
-            IOError(),
-            EnvironmentError(),
-            UnsupportedOperation(),
-            Error(),
-            AccessDenied(),
-        ]
-        for error in disk_errors:
-            mocked_func = MagicMock()
-            wrapped_func = ensure_disk_not_overutilize(mocked_func)
-            mocked_func.side_effect = error
-
-            wrapped_func()  # no assertion needed here: the wrapper should swallow the error
-            mocked_func.assert_called_once()
-
-        non_disk_errors = [OverflowError(), AttributeError()]
-        for error in non_disk_errors:
-            mocked_func = MagicMock()
-            wrapped_func = ensure_disk_not_overutilize(mocked_func)
-            mocked_func.side_effect = error
-
-            with self.assertRaises(BaseException):
-                wrapped_func()
-            mocked_func.assert_called_once()
-
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    @patch.dict(os.environ, {NEPTUNE_MAX_DISK_USAGE: "60"})
-    @patch("psutil.disk_usage")
-    def test_suppressing_of_checking_utilization_errors(self, disk_usage_mock):
-        checking_errors = [
-            TypeError(),
-            UnsupportedOperation(),
-            Error(),
-            AccessDenied(),
-        ]
-        for error in checking_errors:
-            mocked_func = MagicMock()
-            wrapped_func = ensure_disk_not_overutilize(mocked_func)
-            disk_usage_mock.side_effect = error
-
-            wrapped_func()  # no assertion needed here: the wrapper should swallow the error
-            mocked_func.assert_called_once()
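The suppression contract these tests pin down is easy to restate outside the library: treat OSError (the PEP 3151 umbrella for OS and IO failures) and psutil.Error as "disk" errors to swallow, and let everything else propagate. Below is a minimal sketch of that pattern under exactly those assumptions; the helper name suppress_disk_errors is illustrative and is not the deleted ensure_disk_not_overutilize implementation.

import functools

import psutil

# Assumed taxonomy: OSError subsumes IOError, EnvironmentError, and
# io.UnsupportedOperation since PEP 3151; psutil.Error covers AccessDenied.
DISK_ERRORS = (OSError, psutil.Error)


def suppress_disk_errors(func):
    """Call func, swallowing disk/IO errors; any other exception propagates."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except DISK_ERRORS:
            return None  # a real implementation would log the suppressed error

    return wrapper

Under this sketch, a wrapped call returns None on a PermissionError but still raises an AttributeError, which is exactly the split the two loops above assert.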
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    @patch.dict(os.environ, {NEPTUNE_MAX_DISK_USAGE: "100"})
-    @patch("psutil.disk_usage")
-    def test_not_called_with_usage_100_percent(self, disk_usage_mock):
-        disk_usage_mock.return_value.percent = 100
-        mocked_func = MagicMock()
-        wrapped_func = ensure_disk_not_overutilize(mocked_func)
-
-        with pytest.raises(NeptuneMaxDiskUtilizationExceeded):
-            wrapped_func()
-
-        mocked_func.assert_not_called()
-
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    @patch.dict(os.environ, {NEPTUNE_MAX_DISK_USAGE: "100"})
-    @patch("psutil.disk_usage")
-    def test_called_when_usage_less_than_limit(self, disk_usage_mock):
-        disk_usage_mock.return_value.percent = 99
-        mocked_func = MagicMock()
-        wrapped_func = ensure_disk_not_overutilize(mocked_func)
-
-        wrapped_func()
-
-        mocked_func.assert_called_once()
-
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    @patch.dict(os.environ, {NEPTUNE_MAX_DISK_USAGE: "60"})
-    @patch("psutil.disk_usage")
-    def test_not_called_when_usage_exceeds_limit(self, disk_usage_mock):
-        disk_usage_mock.return_value.percent = 99
-        mocked_func = MagicMock()
-        wrapped_func = ensure_disk_not_overutilize(mocked_func)
-        with pytest.raises(NeptuneMaxDiskUtilizationExceeded):
-            wrapped_func()
-
-        mocked_func.assert_not_called()
-
-
-class TestDiskErrorHandler(unittest.TestCase):
-    @patch("neptune.internal.utils.disk_utilization.RaisingErrorHandler")
-    @patch("neptune.internal.utils.disk_utilization.NonRaisingErrorHandler")
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "True"})
-    def test_raising_handler_used_if_env_var_true(self, mock_non_raising_handler, mock_raising_handler):
-        decorated = ensure_disk_not_overutilize(MagicMock())
-        decorated()
-        mock_raising_handler.assert_called_once()
-        mock_non_raising_handler.assert_not_called()
-
-    @patch("neptune.internal.utils.disk_utilization.RaisingErrorHandler")
-    @patch("neptune.internal.utils.disk_utilization.NonRaisingErrorHandler")
-    @patch.dict(os.environ, {NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED: "False"})
-    def test_non_raising_handler_used_if_env_var_false(self, mock_non_raising_handler, mock_raising_handler):
-        decorated = ensure_disk_not_overutilize(MagicMock())
-        decorated()
-        mock_non_raising_handler.assert_called_once()
-        mock_raising_handler.assert_not_called()
-
-    def test_non_raising_handler(self):
-        func = MagicMock()
-        func.side_effect = OSError
-
-        handler = NonRaisingErrorHandler(None, func)
-        handler.handle_limit_not_set()  # should not raise exception
-
-        handler = NonRaisingErrorHandler(90.0, func)
-        handler.handle_utilization_calculation_error()  # should not raise exception
-
-        handler.handle_limit_not_exceeded()  # should not raise exception
-
-        handler.handle_limit_exceeded(100)  # should not raise exception
-
-        handler.run()  # should not raise exception
-
-    def test_raising_handler(self):
-        func = MagicMock()
-        func.side_effect = OSError
-
-        with pytest.raises(OSError):
-            handler = RaisingErrorHandler(None, func)
-            handler.handle_limit_not_set()
-
-        with pytest.raises(OSError):
-            handler = RaisingErrorHandler(None, func)
-            handler.handle_utilization_calculation_error()
-
-        with pytest.raises(OSError):
-            handler = RaisingErrorHandler(100.0, func)
-            handler.handle_limit_not_exceeded()
-
-        with pytest.raises(NeptuneMaxDiskUtilizationExceeded):
-            handler = RaisingErrorHandler(90.0, func)
-            handler.handle_limit_exceeded(100)
-
-        with pytest.raises(OSError):
-            handler.run()
-
-        func.side_effect = None
-        with patch("neptune.internal.utils.disk_utilization.get_disk_utilization_percent", return_value=95):
-            with pytest.raises(NeptuneMaxDiskUtilizationExceeded):
-                handler.run()
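For the behaviour the TestDiskErrorHandler cases above pin down, the moving parts are: read the limit from NEPTUNE_MAX_DISK_USAGE, read the raise-or-warn switch from NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED, and treat usage at or above the limit as exceeded (note that test_not_called_with_usage_100_percent expects a raise when percent equals the limit). Here is a compact sketch under those assumptions; the function and its RuntimeError stand-in are illustrative, not the deleted Neptune internals.

import os
import warnings

import psutil


def check_disk_usage(path: str = "/") -> None:
    """Enforce the configured disk-usage ceiling before attempting disk work."""
    raw_limit = os.getenv("NEPTUNE_MAX_DISK_USAGE")
    if raw_limit is None:
        return  # no limit configured, nothing to enforce

    try:
        limit = float(raw_limit)
        if not 0 < limit <= 100:
            raise ValueError
    except ValueError:
        # Mirrors the invalid-env tests: warn once, then run unchecked.
        warnings.warn(f"invalid value of 'NEPTUNE_MAX_DISK_USAGE': '{raw_limit}'")
        return

    try:
        used_percent = psutil.disk_usage(path).percent
    except psutil.Error:
        return  # the check itself failed; never block user code because of that

    if used_percent >= limit:
        message = f"disk usage {used_percent}% is at or above the {limit}% limit"
        if os.getenv("NEPTUNE_RAISE_ERROR_ON_DISK_USAGE_EXCEEDED", "False").lower() == "true":
            raise RuntimeError(message)  # stand-in for NeptuneMaxDiskUtilizationExceeded
        warnings.warn(message)

diff --git a/tests/unit/neptune/new/internal/utils/test_git.py 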
b/tests/unit/neptune/new/internal/utils/test_git.py deleted file mode 100644 index c3b44e0ab..000000000 --- a/tests/unit/neptune/new/internal/utils/test_git.py +++ /dev/null @@ -1,265 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import datetime - -import git -import pytest -from git import Repo -from mock import ( - MagicMock, - patch, -) - -from neptune.internal.utils.git import ( - GitInfo, - get_diff, - get_relevant_upstream_commit, - get_repo_from_git_ref, - get_uncommitted_changes, - get_upstream_index_sha, - search_for_most_recent_ancestor, - to_git_info, - track_uncommitted_changes, -) -from neptune.types import GitRef - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -class TestGit: - def test_disabled(self): - assert to_git_info(GitRef.DISABLED) is None - - @patch("git.Repo") - def test_getting_git_info(self, mock_repo): - # given - now = datetime.datetime.now() - repo = mock_repo.return_value - repo.is_dirty.return_value = True - repo.head.commit.hexsha = "sha" - repo.head.commit.message = "message" - repo.head.commit.author.name = "author_name" - repo.head.commit.author.email = "author@email" - repo.head.commit.committed_datetime = now - repo.active_branch.name = "master" - repo.remotes = [] - - # when - git_info = to_git_info(GitRef(".")) - - # then - assert git_info == GitInfo( - commit_id="sha", - message="message", - author_name="author_name", - author_email="author@email", - commit_date=now, - dirty=True, - branch="master", - remotes=[], - ) - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -def test_get_repo_from_git_ref_disabled(): - # given - git_ref = GitRef.DISABLED - - # when - repo = get_repo_from_git_ref(git_ref) - - # then - assert repo is None - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -def test_get_repo_from_git_ref(): - # given - git_ref = GitRef() - - # when - repo = get_repo_from_git_ref(git_ref) - - # then - assert isinstance(repo, git.Repo) - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -@patch("git.Repo") -def test_get_diff(mock_repo): - # when - get_diff(mock_repo, "some_ref") - - # then - mock_repo.git.diff.assert_called_once_with("some_ref", index=False) - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -@patch("git.Repo") -def test_get_diff_command_error(mock_repo): - # given - mock_repo.git.diff.side_effect = git.GitCommandError("diff") - - # when - diff = get_diff(mock_repo, "some_ref") - - # then - mock_repo.git.diff.assert_called_once_with("some_ref", index=False) - assert diff is None - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -@patch("git.Repo") -def test_search_for_most_recent_ancestor(mock_repo): - # given - mock_repo.active_branch.tracking_branch.return_value = None - - tracking_branch = MagicMock() - branch = MagicMock() - branch.tracking_branch.return_value = tracking_branch - mock_repo.heads = 
[branch, branch, branch]
-
-    mock_repo.is_ancestor.return_value = True
-    ancestor = MagicMock()
-    ancestor_to_be_chosen = MagicMock()
-    ancestor_to_be_chosen.hexsha = "sha1234"
-    mock_repo.merge_base.return_value = [ancestor, ancestor_to_be_chosen]
-
-    # when
-    searched_ancestor = search_for_most_recent_ancestor(mock_repo)
-
-    # then
-    assert searched_ancestor.hexsha == "sha1234"
-
-    assert mock_repo.merge_base.call_count == 3
-    # 3 branches x 2 merge-base results = 6 candidate ancestors; the first one is
-    # taken without comparison while most_recent_ancestor is still None, so 5 calls.
-    assert mock_repo.is_ancestor.call_count == 5
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("neptune.internal.utils.git.search_for_most_recent_ancestor")
-@patch("git.Repo")
-def test_get_relevant_upstream_commit_no_search(mock_repo, mock_search):
-    # when
-    upstream_commit = get_relevant_upstream_commit(mock_repo)
-
-    # then
-    assert upstream_commit == mock_repo.active_branch.tracking_branch.return_value.commit
-    mock_search.assert_not_called()
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("neptune.internal.utils.git.search_for_most_recent_ancestor")
-@patch("git.Repo")
-def test_get_relevant_upstream_commit_with_search(mock_repo, mock_search):
-    # given
-    mock_repo.active_branch.tracking_branch.return_value = None
-
-    # when
-    upstream_commit = get_relevant_upstream_commit(mock_repo)
-
-    # then
-    assert upstream_commit == mock_search.return_value
-    mock_search.assert_called_once_with(mock_repo)
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("neptune.internal.utils.git.get_relevant_upstream_commit")
-@patch("git.Repo")
-def test_get_upstream_index_sha(mock_repo, mock_get_upstream_commit):
-    # given
-    mock_get_upstream_commit.return_value.hexsha = "test_sha"
-
-    # when
-    sha = get_upstream_index_sha(mock_repo)
-
-    # then
-    assert sha == "test_sha"
-    mock_get_upstream_commit.assert_called_once_with(mock_repo)
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("git.Repo")
-def test_detached_head(mock_repo):
-    # given
-    mock_repo.active_branch.tracking_branch.side_effect = TypeError
-
-    # when
-    sha = get_upstream_index_sha(mock_repo)
-
-    # then
-    assert sha is None
-    mock_repo.git.diff.assert_not_called()
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("git.Repo")
-@patch("neptune.internal.utils.git.get_upstream_index_sha", return_value="test_sha")
-def test_get_uncommitted_changes(mock_get_sha, mock_repo):
-    # given
-    mock_repo.git.diff.return_value = "some_diff"
-    mock_repo.head.name = "HEAD"
-
-    # when
-    uncommitted_changes = get_uncommitted_changes(mock_repo)
-
-    # then
-    assert mock_repo.git.diff.call_count == 2
-    assert mock_get_sha.call_count == 1
-    assert uncommitted_changes.diff_head == "some_diff\n"
-    assert uncommitted_changes.upstream_sha == "test_sha"
-    assert uncommitted_changes.diff_upstream == "some_diff\n"
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-def test_get_uncommitted_changes_clean_repo(tmp_path_factory):
-    # given
-    path = tmp_path_factory.mktemp("git_repo")
-    repo = Repo.init(path)
-
-    # when
-    uncommitted_changes = get_uncommitted_changes(repo)
-
-    # then
-    assert uncommitted_changes is None
-
-
-@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0")
-@patch("neptune.internal.utils.git.get_uncommitted_changes")
-@patch("neptune.objects.Run")
-def test_git_ref_disabled(mock_run, mock_get_changes):
-    # when
-    
track_uncommitted_changes(GitRef.DISABLED, mock_run) - - # then - mock_get_changes.assert_not_called() - - -@pytest.mark.skip("Temporarily disabled - will be brought back in 2.0.0") -@patch("neptune.internal.utils.git.get_uncommitted_changes") -@patch("neptune.internal.utils.git.get_repo_from_git_ref") -@patch("neptune.internal.utils.git.File") -@patch("neptune.objects.Run") -def test_track_uncommitted_changes(mock_run, mock_file, mock_get_repo, mock_get_changes): - # given - git_ref = GitRef() - - # when - track_uncommitted_changes(git_ref, mock_run) - - # then - assert mock_file.from_content.call_count == 2 - mock_get_repo.assert_called_once_with(git_ref) - mock_get_changes.assert_called_once() diff --git a/tests/unit/neptune/new/internal/utils/test_hashing.py b/tests/unit/neptune/new/internal/utils/test_hashing.py deleted file mode 100644 index 037bb90fd..000000000 --- a/tests/unit/neptune/new/internal/utils/test_hashing.py +++ /dev/null @@ -1,48 +0,0 @@ -# -# Copyright (c) 2023, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from random import ( - choices, - randint, -) -from string import ( - ascii_uppercase, - digits, -) - -from neptune.internal.utils.hashing import generate_hash - - -class TestHashGenerator: - def test_should_be_deterministic(self): - # given - descriptors = [randint(0, 1024), "".join(choices(ascii_uppercase + digits, k=8))] - - # when - hash1 = generate_hash(*descriptors, length=8) - hash2 = generate_hash(*descriptors, length=8) - - # then - assert hash1 == hash2 - - def test_should_be_unique(self): - # given - unique_descriptors = set((randint(0, 1024), "".join(choices(ascii_uppercase + digits, k=8))) for _ in range(10)) - - # when - unique_hashes = set(generate_hash(*descriptors, length=8) for descriptors in unique_descriptors) - - # then - assert len(unique_descriptors) == len(unique_hashes) diff --git a/tests/unit/neptune/new/internal/utils/test_images.py b/tests/unit/neptune/new/internal/utils/test_images.py deleted file mode 100644 index aa626653f..000000000 --- a/tests/unit/neptune/new/internal/utils/test_images.py +++ /dev/null @@ -1,327 +0,0 @@ -# -# Copyright (c) 2019, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import contextlib -import io -import os -import sys -import unittest -from functools import partial -from typing import Optional -from uuid import uuid4 - -import altair as alt -import numpy -import pandas -import plotly.express as px -import pytest -from bokeh.plotting import figure -from vega_datasets import data - -from neptune.internal.utils.images import ( - _scale_array, - get_html_content, - get_image_content, -) -from neptune.internal.utils.utils import ( - IS_MACOS, - IS_WINDOWS, -) -from tests.unit.neptune.new.utils.logging import format_log - -Image = pytest.importorskip("PIL.Image") -Figure = pytest.importorskip("matplotlib.figure.Figure") - - -@pytest.mark.xfail(reason="Removing PIL from dependencies", raises=ModuleNotFoundError) -class TestImage(unittest.TestCase): - - TEST_DIR = "/tmp/neptune/{}".format(uuid4()) - - def setUp(self): - if not os.path.exists(self.TEST_DIR): - os.makedirs(self.TEST_DIR) - - def test_get_image_content_from_pil_image(self): - # given - image_array = self._random_image_array() - expected_image = Image.fromarray(image_array.astype(numpy.uint8)) - - # expect - self.assertEqual(get_image_content(expected_image), self._encode_pil_image(expected_image)) - - def test_get_image_content_from_2d_grayscale_array(self): - # given - image_array = self._random_image_array(d=None) - scaled_array = image_array * 255 - expected_image = Image.fromarray(scaled_array.astype(numpy.uint8)) - - # expect - self.assertEqual(get_image_content(image_array), self._encode_pil_image(expected_image)) - - def test_get_image_content_from_3d_grayscale_array(self): - # given - image_array = numpy.array([[[1], [0]], [[-3], [4]], [[5], [6]]]) - expected_array = numpy.array([[1, 0], [-3, 4], [5, 6]]) - expected_image = Image.fromarray(expected_array.astype(numpy.uint8)) - - # when - - # expect - self.assertEqual(get_image_content(image_array), self._encode_pil_image(expected_image)) - - def test_get_image_content_from_rgb_array(self): - # given - image_array = self._random_image_array() - scaled_array = image_array * 255 - expected_image = Image.fromarray(scaled_array.astype(numpy.uint8)) - - # expect - self.assertEqual(get_image_content(image_array), self._encode_pil_image(expected_image)) - - # and make sure that original image's size was preserved - self.assertFalse((image_array * 255 - scaled_array).any()) - - def test_get_image_content_from_rgba_array(self): - # given - image_array = self._random_image_array(d=4) - scaled_array = image_array * 255 - expected_image = Image.fromarray(scaled_array.astype(numpy.uint8)) - - # expect - self.assertEqual(get_image_content(image_array), self._encode_pil_image(expected_image)) - - # and make sure that original image's size was preserved - self.assertFalse((image_array * 255 - scaled_array).any()) - - def test_get_image_content_from_figure(self): - # given - pyplot.plot([1, 2, 3, 4]) # noqa: F821 - pyplot.ylabel("some interesting numbers") # noqa: F821 - fig = pyplot.gcf() # noqa: F821 - - # expect - self.assertEqual(get_image_content(fig), self._encode_figure(fig)) - - @unittest.skipIf(IS_WINDOWS, "Installing Torch on Windows takes too long") - @unittest.skipIf( - IS_MACOS and sys.version_info.major == 3 and sys.version_info.minor == 10, - "No torch for 3.10 on Mac", - ) - def test_get_image_content_from_torch_tensor(self): - import torch - - # given - image_tensor = torch.rand(200, 300, 3) - expected_array = image_tensor.numpy() * 255 - expected_image = Image.fromarray(expected_array.astype(numpy.uint8)) - - # expect - 
self.assertEqual(get_image_content(image_tensor), self._encode_pil_image(expected_image)) - - # and make sure that original image's size was preserved - self.assertFalse((image_tensor.numpy() * 255 - expected_array).any()) - - @pytest.mark.skip("Conflicts with protobuf version") - def test_get_image_content_from_tensorflow_tensor(self): - import tensorflow as tf - - # given - image_tensor = tf.random.uniform(shape=[200, 300, 3]) - expected_array = image_tensor.numpy() * 255 - expected_image = Image.fromarray(expected_array.astype(numpy.uint8)) - - # expect - self.assertEqual(get_image_content(image_tensor), self._encode_pil_image(expected_image)) - - def test_get_image_content_from_seaborn_figure(self): - # given - grid = sns.relplot(numpy.random.randn(6, 4)) # noqa: F821 - - # then - self.assertEqual(get_image_content(grid), self._encode_figure(grid)) - - def test_get_html_from_matplotlib_figure(self): - # given - fig = pyplot.figure() # noqa: F821 - x = [ - 21, - 22, - 23, - 4, - 5, - 6, - 77, - 8, - 9, - 10, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 18, - 49, - 50, - 100, - ] - pyplot.hist(x, bins=5) # noqa: F821 - - # when - result = get_html_content(fig) - - # then - self.assertTrue(result.startswith('\n')) - - def test_get_html_from_plotly(self): - # given - df = px.data.tips() - fig = px.histogram( - df, - x="total_bill", - y="tip", - color="sex", - marginal="rug", - hover_data=df.columns, - ) - - # when - result = get_html_content(fig) - - # then - self.assertTrue(result.startswith('\n')) - - def test_get_html_from_altair(self): - # given - source = data.cars() - - chart = ( - alt.Chart(source) - .mark_circle(size=60) - .encode( - x="Horsepower", - y="Miles_per_Gallon", - color="Origin", - tooltip=["Name", "Origin", "Horsepower", "Miles_per_Gallon"], - ) - .interactive() - ) - - # when - result = get_html_content(chart) - - # then - self.assertTrue(result.startswith("\n\n\n