index
int64
0
0
repo_id
stringlengths
21
232
file_path
stringlengths
34
259
content
stringlengths
1
14.1M
__index_level_0__
int64
0
10k
0
kubeflow_public_repos/pipelines/kubernetes_platform
kubeflow_public_repos/pipelines/kubernetes_platform/python/release.sh
#!/bin/bash -ex # # Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # run from within ./kubernetes_platform/python # set environment variable KFP_KUBERNETES_VERSION # ensure you are on the correct release branch, created by create_release_branch.sh PKG_ROOT=$(pwd) REPO_ROOT=$(dirname $(dirname $PKG_ROOT)) echo $REPO_ROOT SETUPPY_VERSION=$(python -c 'from kfp.kubernetes.__init__ import __version__; print(__version__)') if [ -z "$KFP_KUBERNETES_VERSION" ] then echo "Set \$KFP_KUBERNETES_VERSION to use this script. Got empty variable." elif [[ "$KFP_KUBERNETES_VERSION" != "$SETUPPY_VERSION" ]] then echo "\$KFP_KUBERNETES_VERSION '$KFP_KUBERNETES_VERSION' does not match version in setup.py '$SETUPPY_VERSION'." else echo "Got version $KFP_KUBERNETES_VERSION from env var \$KFP_KUBERNETES_VERSION" echo "Building package..." TARGET_TAR_FILE=kfp-kubernetes-$KFP_KUBERNETES_VERSION.tar.gz pushd "$(dirname "$0")" dist_dir=$(mktemp -d) python3 setup.py sdist --format=gztar --dist-dir "$dist_dir" cp "$dist_dir"/*.tar.gz $TARGET_TAR_FILE popd echo "Created package." echo "Testing install" pip install $TARGET_TAR_FILE --break-system-packages INSTALLED_VERSION=$(pip list | grep kfp-kubernetes | awk '{print $2}') if [[ "$INSTALLED_VERSION" != "$KFP_KUBERNETES_VERSION" ]] then echo "Something went wrong! Expected version $KFP_KUBERNETES_VERSION but found version $INSTALLED_VERSION" else python -m twine upload $TARGET_TAR_FILE fi fi
400
0
kubeflow_public_repos/pipelines/kubernetes_platform
kubeflow_public_repos/pipelines/kubernetes_platform/python/create_release_branch.sh
#!/bin/bash -ex # # Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # run from within ./kubernetes_platform/python # set environment variable KFP_KUBERNETES_VERSION PKG_ROOT=$(pwd) REPO_ROOT=$(dirname $(dirname $PKG_ROOT)) echo $REPO_ROOT echo "Generating Python protobuf code..." pushd "$PKG_ROOT/.." make clean-python python popd SETUPPY_VERSION=$(python -c 'from kfp.kubernetes.__init__ import __version__; print(__version__)') if [ -z "$KFP_KUBERNETES_VERSION" ] then echo "Set \$KFP_KUBERNETES_VERSION to use this script. Got empty variable." elif [[ "$KFP_KUBERNETES_VERSION" != "$SETUPPY_VERSION" ]] then echo "\$KFP_KUBERNETES_VERSION '$KFP_KUBERNETES_VERSION' does not match version in setup.py '$SETUPPY_VERSION'." else echo "Got version $KFP_KUBERNETES_VERSION from env var \$KFP_KUBERNETES_VERSION" BRANCH_NAME=kfp-kubernetes-$KFP_KUBERNETES_VERSION echo "Creating release branch $BRANCH_NAME..." git checkout -b $BRANCH_NAME echo "Moving .readthedocs.yml to root..." # required for this branch because readthedocs only supports on docs build per repo # and the default is currently the KFP SDK # GCPC uses this pattern in this repo as well mv $PKG_ROOT/docs/.readthedocs.yml $REPO_ROOT/.readthedocs.yml rm $REPO_ROOT/kubernetes_platform/.gitignore git add $PKG_ROOT/docs/.readthedocs.yml git add $REPO_ROOT/.readthedocs.yml git add $REPO_ROOT/kubernetes_platform/.gitignore git add $REPO_ROOT/*_pb2.py echo "Next steps:" echo "1. Inspect and commit the modified files." echo "2. Push branch using 'git push --set-upstream upstream $BRANCH_NAME'" fi
401
0
kubeflow_public_repos/pipelines/kubernetes_platform
kubeflow_public_repos/pipelines/kubernetes_platform/python/setup.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import re import setuptools NAME = 'kfp-kubernetes' REQUIREMENTS = [ 'protobuf>=4.21.1,<5', 'kfp>=2.6.0,<3', ] DEV_REQUIREMENTS = [ 'docformatter==1.4', 'isort==5.10.1', 'mypy==0.941', 'pre-commit==2.19.0', 'pycln==2.1.1', 'pytest==7.1.2', 'pytest-xdist==2.5.0', 'yapf==0.32.0', ] def find_version(*file_path_parts: str) -> str: """Get version from kfp.__init__.__version__.""" file_path = os.path.join(os.path.dirname(__file__), *file_path_parts) with open(file_path, 'r') as f: version_file_text = f.read() version_match = re.search( r"^__version__ = ['\"]([^'\"]*)['\"]", version_file_text, re.M, ) if version_match: return version_match[1] else: raise ValueError('Could not find version.') def read_readme() -> str: readme_path = os.path.join(os.path.dirname(__file__), 'README.md') with open(readme_path) as f: return f.read() setuptools.setup( name=NAME, version=find_version('kfp', 'kubernetes', '__init__.py'), description='Kubernetes platform configuration library and generated protos.', long_description=read_readme(), long_description_content_type='text/markdown', author='google', author_email='[email protected]', url='https://github.com/kubeflow/pipelines', project_urls={ 'Documentation': 'https://kfp-kubernetes.readthedocs.io/', 'Bug Tracker': 'https://github.com/kubeflow/pipelines/issues', 'Source': 'https://github.com/kubeflow/pipelines/tree/master/kubernetes_platform/python', }, packages=setuptools.find_namespace_packages(include=['kfp.*']), python_requires='>=3.9.0,<3.13.0', install_requires=REQUIREMENTS, include_package_data=True, extras_require={ 'dev': DEV_REQUIREMENTS, }, license='Apache 2.0', )
402
0
kubeflow_public_repos/pipelines/kubernetes_platform
kubeflow_public_repos/pipelines/kubernetes_platform/python/generate_proto.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import subprocess import sys try: from distutils.spawn import find_executable except ImportError: from shutil import which as find_executable PLATFORM_DIR = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) PROTO_DIR = os.path.join(PLATFORM_DIR, 'proto') PKG_DIR = os.path.realpath( os.path.join(PLATFORM_DIR, 'python', 'kfp', 'kubernetes')) # Find the Protocol Compiler. (Taken from protobuf/python/setup.py) if 'PROTOC' in os.environ and os.path.exists(os.environ['PROTOC']): PROTOC = os.environ['PROTOC'] else: PROTOC = find_executable('protoc') def generate_proto(source: str) -> None: """Generate a _pb2.py from a .proto file. Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. Does nothing if the output already exists and is newer than the input. Args: source: The source proto file that needs to be compiled. """ output = source.replace('.proto', '_pb2.py') if not os.path.exists(output) or ( os.path.exists(source) and os.path.getmtime(source) > os.path.getmtime(output)): print(f'Generating {output}...') if not os.path.exists(source): sys.stderr.write(f"Can't find required file: {source}\n") sys.exit(-1) if PROTOC is None: sys.stderr.write( 'protoc is not found. Please compile it or install the binary package.\n' ) sys.exit(-1) protoc_command = [ PROTOC, f'-I={PROTO_DIR}', f'--experimental_allow_proto3_optional', f'--python_out={PKG_DIR}', source, ] if subprocess.call(protoc_command) != 0: sys.exit(-1) if __name__ == '__main__': generate_proto(os.path.join(PROTO_DIR, 'kubernetes_executor_config.proto'))
403
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/read_write_test.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import re import sys import tempfile from typing import Any, Callable, Dict, List, Tuple, Union import unittest from kfp import compiler from kfp import components from kfp.components import placeholders import pytest import yaml _TEST_ROOT_DIR = os.path.dirname(__file__) _CONFIG_PATH = os.path.join(_TEST_ROOT_DIR, 'test_data_config.yaml') _TEST_DATA_DIR = os.path.join(_TEST_ROOT_DIR, 'data') def create_test_cases() -> List[Dict[str, Any]]: with open(_CONFIG_PATH) as f: config = yaml.safe_load(f) return [(test_case['module'], test_case['name']) for test_case in config['test_cases']] def import_obj_from_file(python_path: str, obj_name: str) -> Any: sys.path.insert(0, os.path.dirname(python_path)) module_name = os.path.splitext(os.path.split(python_path)[1])[0] module = __import__(module_name, fromlist=[obj_name]) if not hasattr(module, obj_name): raise ValueError( f'Object "{obj_name}" not found in module {python_path}.') return getattr(module, obj_name) def ignore_kfp_version_helper(spec: Dict[str, Any]) -> Dict[str, Any]: """Ignores kfp sdk versioning in command. Takes in a YAML input and ignores the kfp sdk versioning in command for comparison between compiled file and goldens. """ pipeline_spec = spec.get('pipelineSpec', spec) if 'executors' in pipeline_spec['deploymentSpec']: for executor in pipeline_spec['deploymentSpec']['executors']: pipeline_spec['deploymentSpec']['executors'][ executor] = yaml.safe_load( re.sub( r"'kfp==(\d+).(\d+).(\d+)(-[a-z]+.\d+)?'", 'kfp', yaml.dump( pipeline_spec['deploymentSpec']['executors'] [executor], sort_keys=True))) return spec def load_pipeline_spec_and_platform_spec( filename: str) -> Tuple[Dict[str, Any], Dict[str, Any]]: with open(filename) as f: pipeline_spec, platform_spec = tuple(yaml.safe_load_all(f)) # ignore the sdkVersion del pipeline_spec['sdkVersion'] return ignore_kfp_version_helper(pipeline_spec), platform_spec def handle_placeholders( component_spec: 'structures.ComponentSpec' ) -> 'structures.ComponentSpec': if component_spec.implementation.container is not None: if component_spec.implementation.container.command is not None: component_spec.implementation.container.command = [ placeholders.convert_command_line_element_to_string(c) for c in component_spec.implementation.container.command ] if component_spec.implementation.container.args is not None: component_spec.implementation.container.args = [ placeholders.convert_command_line_element_to_string(a) for a in component_spec.implementation.container.args ] return component_spec def handle_expected_diffs( component_spec: 'structures.ComponentSpec' ) -> 'structures.ComponentSpec': """Strips some component spec fields that should be ignored when comparing with golden result.""" # Ignore description when comparing components specs read in from v1 component YAML and from IR YAML, because non lightweight Python components defined in v1 YAML can have a description field, but IR YAML does not preserve this field unless the component is a lightweight Python function-based component component_spec.description = None # ignore SDK version so that golden snapshots don't need to be updated between SDK version bump if component_spec.implementation.graph is not None: component_spec.implementation.graph.sdk_version = '' return handle_placeholders(component_spec) class TestReadWrite: def _compile_and_load_component( self, compilable: Union[Callable[..., Any], 'python_component.PythonComponent']): with tempfile.TemporaryDirectory() as tmp_dir: tmp_file = os.path.join(tmp_dir, 're_compiled_output.yaml') compiler.Compiler().compile(compilable, tmp_file) return components.load_component_from_file(tmp_file) def _compile_and_read_yaml( self, compilable: Union[Callable[..., Any], 'python_component.PythonComponent'] ) -> Tuple[Dict[str, Any], Dict[str, Any]]: with tempfile.TemporaryDirectory() as tmp_dir: tmp_file = os.path.join(tmp_dir, 're_compiled_output.yaml') compiler.Compiler().compile(compilable, tmp_file) return load_pipeline_spec_and_platform_spec(tmp_file) def _test_serialization_deserialization_consistency(self, yaml_file: str): """Tests serialization and deserialization consistency.""" original_component = components.load_component_from_file(yaml_file) reloaded_component = self._compile_and_load_component( original_component) assert handle_expected_diffs( original_component.component_spec) == handle_expected_diffs( reloaded_component.component_spec) def _test_serialization_correctness( self, python_file: str, yaml_file: str, function_name: str, ): """Tests serialization correctness.""" pipeline = import_obj_from_file(python_file, function_name) compiled_pipeline_spec, compiled_platform_spec = self._compile_and_read_yaml( pipeline) golden_pipeline_spec, golden_platform_spec = load_pipeline_spec_and_platform_spec( yaml_file) assert compiled_pipeline_spec == golden_pipeline_spec assert compiled_platform_spec == golden_platform_spec @pytest.mark.parametrize('test_case,function', create_test_cases()) def test( self, test_case: str, function: str, ): """Tests serialization and deserialization consistency and correctness. Args: name (str): '{test_group_name}-{test_case_name}'. Useful for print statements/debugging. test_case (str): Test case name (without file extension). test_data_dir (str): The directory containing the test case files. function (str, optional): The function name to compile. read (bool): Whether the pipeline/component supports deserialization from YAML (IR, except for V1 component YAML back compatability tests). write (bool): Whether the pipeline/component supports compilation from a Python file. """ yaml_file = os.path.join(_TEST_DATA_DIR, f'{test_case}.yaml') py_file = os.path.join(_TEST_DATA_DIR, f'{test_case}.py') self._test_serialization_correctness( python_file=py_file, yaml_file=yaml_file, function_name=function) self._test_serialization_deserialization_consistency( yaml_file=yaml_file) if __name__ == '__main__': unittest.main()
404
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/test_data_config.yaml
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. test_cases: # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing # - module: create_mount_delete_dynamic_pvc # name: my_pipeline - module: create_mount_delete_existing_pvc name: my_pipeline - module: create_mount_delete_existing_pvc_from_task_output name: my_pipeline # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing # - module: secret_as_env # name: my_pipeline # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing # - module: secret_as_vol # name: my_pipeline # Disabled while https://github.com/kubeflow/pipelines/issues/10918 is failing # - module: node_selector # name: my_pipeline
405
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.7.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: emptyDirMounts: - medium: Memory mountPath: /mnt/my_vol_1 sizeLimit: 1Gi volumeName: emptydir-vol-1
406
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.6.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.6.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: imagePullSecret: - secretName: my-secret
407
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.component def get_pvc_name() -> str: return 'static-pvc-name' @dsl.pipeline def my_pipeline(): name_task = get_pvc_name() pvc1 = kubernetes.CreatePVC( pvc_name=name_task.output, access_modes=['ReadWriteOnce'], size='5Gi', storage_class_name='standard', ) task1 = comp() kubernetes.mount_pvc( task1, pvc_name=pvc1.outputs['name'], mount_path='/data', ) delete_pvc1 = kubernetes.DeletePVC(pvc_name=name_task.output).after(task1) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
408
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): pvc1 = kubernetes.CreatePVC( pvc_name='static-pvc-name', access_modes=['ReadWriteOnce'], size='5Gi', storage_class_name='standard', ) task1 = comp() kubernetes.mount_pvc( task1, pvc_name=pvc1.outputs['name'], mount_path='/data', ) delete_pvc1 = kubernetes.DeletePVC( pvc_name=pvc1.outputs['name']).after(task1) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
409
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-consumer: executorLabel: exec-consumer outputDefinitions: parameters: Output: parameterType: STRING comp-createpvc: executorLabel: exec-createpvc inputDefinitions: parameters: access_modes: description: 'AccessModes to request for the provisioned PVC. May be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, or ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`_.' parameterType: LIST annotations: description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. isOptional: true parameterType: STRUCT pvc_name: description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING pvc_name_suffix: description: 'Prefix to use for a dynamically generated name, which will take the form ``<argo-workflow-name>-<pvc_name_suffix>``. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING size: description: The size of storage requested by the PVC that will be provisioned. For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. parameterType: STRING storage_class_name: defaultValue: '' description: 'Name of StorageClass from which to provision the PV to back the PVC. ``None`` indicates to use the cluster''s default storage_class_name. Set to ``''''`` for a statically specified PVC.' isOptional: true parameterType: STRING volume_name: description: 'Pre-existing PersistentVolume that should back the provisioned PersistentVolumeClaim. Used for statically specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_.' isOptional: true parameterType: STRING outputDefinitions: parameters: name: parameterType: STRING comp-deletepvc: executorLabel: exec-deletepvc inputDefinitions: parameters: pvc_name: description: Name of the PVC to delete. Supports passing a runtime-generated name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. parameterType: STRING comp-producer: executorLabel: exec-producer outputDefinitions: parameters: Output: parameterType: STRING deploymentSpec: executors: exec-consumer: container: args: - --executor_input - '{{$}}' - --function_to_execute - consumer command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef consumer() -> str:\n with open('/data/file.txt', 'r') as file:\n\ \ content = file.read()\n print(content)\n return content\n\ \n" image: python:3.9 exec-createpvc: container: image: argostub/createpvc exec-deletepvc: container: image: argostub/deletepvc exec-producer: container: args: - --executor_input - '{{$}}' - --function_to_execute - producer command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as file:\n\ \ file.write('Hello world')\n with open('/data/file.txt', 'r')\ \ as file:\n content = file.read()\n print(content)\n return\ \ content\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: consumer: cachingOptions: enableCache: true componentRef: name: comp-consumer dependentTasks: - createpvc - producer taskInfo: name: consumer createpvc: cachingOptions: enableCache: true componentRef: name: comp-createpvc inputs: parameters: access_modes: runtimeValue: constant: - ReadWriteOnce pvc_name_suffix: runtimeValue: constant: -my-pvc size: runtimeValue: constant: 5Mi storage_class_name: runtimeValue: constant: standard taskInfo: name: createpvc deletepvc: cachingOptions: enableCache: true componentRef: name: comp-deletepvc dependentTasks: - consumer - createpvc inputs: parameters: pvc_name: taskOutputParameter: outputParameterKey: name producerTask: createpvc taskInfo: name: deletepvc producer: cachingOptions: enableCache: true componentRef: name: comp-producer dependentTasks: - createpvc taskInfo: name: producer schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-consumer: pvcMount: - mountPath: /data taskOutputParameter: outputParameterKey: name producerTask: createpvc exec-producer: pvcMount: - mountPath: /data taskOutputParameter: outputParameterKey: name producerTask: createpvc
410
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/field_path_as_env.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_field_path_as_env( task, env_name='KFP_RUN_NAME', field_path="metadata.annotations['pipelines.kubeflow.org/run_name']" ) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
411
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def producer() -> str: with open('/data/file.txt', 'w') as file: file.write('Hello world') with open('/data/file.txt', 'r') as file: content = file.read() print(content) return content @dsl.component def consumer() -> str: with open('/data/file.txt', 'r') as file: content = file.read() print(content) return content @dsl.pipeline def my_pipeline(): pvc1 = kubernetes.CreatePVC( pvc_name_suffix='-my-pvc', access_modes=['ReadWriteOnce'], size='5Mi', storage_class_name='standard', ) task1 = producer() task2 = consumer().after(task1) kubernetes.mount_pvc( task1, pvc_name=pvc1.outputs['name'], mount_path='/data', ) kubernetes.mount_pvc( task2, pvc_name=pvc1.outputs['name'], mount_path='/data', ) delete_pvc1 = kubernetes.DeletePVC( pvc_name=pvc1.outputs['name']).after(task2) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
412
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/timeout.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.6.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.6.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: activeDeadlineSeconds: '20'
413
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.7.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: genericEphemeralVolume: - accessModes: - ReadWriteOnce defaultStorageClass: true metadata: annotations: annotation1: a1 mountPath: path size: 5Gi volumeName: pvc-name
414
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_secrets(task, ['my-secret']) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
415
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/timeout.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_timeout(task, 20) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
416
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/config_map_as_env.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: configMapAsEnv: - keyToEnv: - envVar: CONFIG_MAP_VAR configMapKey: foo configMapName: my-cm
417
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: configMapAsVolume: - mountPath: /mnt/my_vol configMapName: my-cm optional: False
418
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.empty_dir_mount( task, volume_name='emptydir-vol-1', mount_path='/mnt/my_vol_1', medium='Memory', size_limit='1Gi' ) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
419
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/secret_as_env.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_env( task, secret_name='my-secret', secret_key_to_env={'password': 'SECRET_VAR'}) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
420
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/node_selector.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: nodeSelector: labels: cloud.google.com/gke-accelerator: nvidia-tesla-p4
421
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_ephemeral_volume( task, volume_name='pvc-name', mount_path='path', access_modes=['ReadWriteOnce'], size='5Gi', annotations={"annotation1": "a1"}, ) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
422
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/node_selector.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_node_selector( task, label_key='cloud.google.com/gke-accelerator', label_value='nvidia-tesla-p4', ) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
423
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/config_map_as_env.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_env( task, config_map_name='my-cm', config_map_key_to_env={'foo': 'CONFIG_MAP_VAR'}) if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
424
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp comp-createpvc: executorLabel: exec-createpvc inputDefinitions: parameters: access_modes: description: 'AccessModes to request for the provisioned PVC. May be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, or ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`_.' parameterType: LIST annotations: description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. isOptional: true parameterType: STRUCT pvc_name: description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING pvc_name_suffix: description: 'Prefix to use for a dynamically generated name, which will take the form ``<argo-workflow-name>-<pvc_name_suffix>``. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING size: description: The size of storage requested by the PVC that will be provisioned. For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. parameterType: STRING storage_class_name: defaultValue: '' description: 'Name of StorageClass from which to provision the PV to back the PVC. ``None`` indicates to use the cluster''s default storage_class_name. Set to ``''''`` for a statically specified PVC.' isOptional: true parameterType: STRING volume_name: description: 'Pre-existing PersistentVolume that should back the provisioned PersistentVolumeClaim. Used for statically specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_.' isOptional: true parameterType: STRING outputDefinitions: parameters: name: parameterType: STRING comp-deletepvc: executorLabel: exec-deletepvc inputDefinitions: parameters: pvc_name: description: Name of the PVC to delete. Supports passing a runtime-generated name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. parameterType: STRING comp-get-pvc-name: executorLabel: exec-get-pvc-name outputDefinitions: parameters: Output: parameterType: STRING deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 exec-createpvc: container: image: argostub/createpvc exec-deletepvc: container: image: argostub/deletepvc exec-get-pvc-name: container: args: - --executor_input - '{{$}}' - --function_to_execute - get_pvc_name command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef get_pvc_name() -> str:\n return 'static-pvc-name'\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp dependentTasks: - createpvc taskInfo: name: comp createpvc: cachingOptions: enableCache: true componentRef: name: comp-createpvc dependentTasks: - get-pvc-name inputs: parameters: access_modes: runtimeValue: constant: - ReadWriteOnce pvc_name: taskOutputParameter: outputParameterKey: Output producerTask: get-pvc-name size: runtimeValue: constant: 5Gi storage_class_name: runtimeValue: constant: standard taskInfo: name: createpvc deletepvc: cachingOptions: enableCache: true componentRef: name: comp-deletepvc dependentTasks: - comp - get-pvc-name inputs: parameters: pvc_name: taskOutputParameter: outputParameterKey: Output producerTask: get-pvc-name taskInfo: name: deletepvc get-pvc-name: cachingOptions: enableCache: true componentRef: name: comp-get-pvc-name taskInfo: name: get-pvc-name schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: pvcMount: - mountPath: /data taskOutputParameter: outputParameterKey: name producerTask: createpvc
425
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/secret_as_vol.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: secretAsVolume: - mountPath: /mnt/my_vol secretName: my-secret optional: False
426
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/secret_as_vol.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='my-secret', mount_path='/mnt/my_vol') if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
427
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='my-cm', mount_path='/mnt/my_vol') if __name__ == '__main__': from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace('.py', '.yaml'))
428
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/field_path_as_env.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.6.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.6.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: fieldPathAsEnv: - fieldPath: metadata.annotations['pipelines.kubeflow.org/run_name'] name: KFP_RUN_NAME
429
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp comp-createpvc: executorLabel: exec-createpvc inputDefinitions: parameters: access_modes: description: 'AccessModes to request for the provisioned PVC. May be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, or ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`_.' parameterType: LIST annotations: description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. isOptional: true parameterType: STRUCT pvc_name: description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING pvc_name_suffix: description: 'Prefix to use for a dynamically generated name, which will take the form ``<argo-workflow-name>-<pvc_name_suffix>``. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' isOptional: true parameterType: STRING size: description: The size of storage requested by the PVC that will be provisioned. For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. parameterType: STRING storage_class_name: defaultValue: '' description: 'Name of StorageClass from which to provision the PV to back the PVC. ``None`` indicates to use the cluster''s default storage_class_name. Set to ``''''`` for a statically specified PVC.' isOptional: true parameterType: STRING volume_name: description: 'Pre-existing PersistentVolume that should back the provisioned PersistentVolumeClaim. Used for statically specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_.' isOptional: true parameterType: STRING outputDefinitions: parameters: name: parameterType: STRING comp-deletepvc: executorLabel: exec-deletepvc inputDefinitions: parameters: pvc_name: description: Name of the PVC to delete. Supports passing a runtime-generated name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. parameterType: STRING deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 exec-createpvc: container: image: argostub/createpvc exec-deletepvc: container: image: argostub/deletepvc pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp dependentTasks: - createpvc taskInfo: name: comp createpvc: cachingOptions: enableCache: true componentRef: name: comp-createpvc inputs: parameters: access_modes: runtimeValue: constant: - ReadWriteOnce pvc_name: runtimeValue: constant: static-pvc-name size: runtimeValue: constant: 5Gi storage_class_name: runtimeValue: constant: standard taskInfo: name: createpvc deletepvc: cachingOptions: enableCache: true componentRef: name: comp-deletepvc dependentTasks: - comp - createpvc inputs: parameters: pvc_name: taskOutputParameter: outputParameterKey: name producerTask: createpvc taskInfo: name: deletepvc schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: pvcMount: - mountPath: /data taskOutputParameter: outputParameterKey: name producerTask: createpvc
430
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/toleration.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from kfp import dsl from kfp import kubernetes from kubernetes.client import V1Toleration @dsl.component def comp(): pass @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_toleration( task, key="key1", operator="Equal", value="value1", effect="NoExecute", toleration_seconds=10, ) if __name__ == "__main__": from kfp import compiler compiler.Compiler().compile(my_pipeline, __file__.replace(".py", ".yaml"))
431
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/secret_as_env.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.4.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.4.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: secretAsEnv: - keyToEnv: - envVar: SECRET_VAR secretKey: password secretName: my-secret
432
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/snapshot/data/toleration.yaml
# PIPELINE DEFINITION # Name: my-pipeline components: comp-comp: executorLabel: exec-comp deploymentSpec: executors: exec-comp: container: args: - --executor_input - '{{$}}' - --function_to_execute - comp command: - sh - -c - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.6.0'\ \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ $0\" \"$@\"\n" - sh - -ec - 'program_path=$(mktemp -d) printf "%s" "$0" > "$program_path/ephemeral_component.py" _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" image: python:3.9 pipelineInfo: name: my-pipeline root: dag: tasks: comp: cachingOptions: enableCache: true componentRef: name: comp-comp taskInfo: name: comp schemaVersion: 2.1.0 sdkVersion: kfp-2.6.0 --- platforms: kubernetes: deploymentSpec: executors: exec-comp: tolerations: - effect: NoExecute key: key1 operator: Equal tolerationSeconds: '10' value: value1
433
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_empty_dir_mounts.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestEmptyDirMounts: def test_add_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.empty_dir_mount( task, volume_name='emptydir-vol-1', mount_path='/mnt/my_vol_1', medium='Memory', size_limit='1Gi' ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'emptyDirMounts': [{ 'medium': 'Memory', 'mountPath': '/mnt/my_vol_1', 'sizeLimit': '1Gi', 'volumeName': 'emptydir-vol-1' }] } } } } } } def test_add_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.empty_dir_mount( task, volume_name='emptydir-vol-1', mount_path='/mnt/my_vol_1', medium='Memory', size_limit='1Gi' ) kubernetes.empty_dir_mount( task, volume_name='emptydir-vol-2', mount_path='/mnt/my_vol_2' ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'emptyDirMounts': [{ 'medium': 'Memory', 'mountPath': '/mnt/my_vol_1', 'sizeLimit': '1Gi', 'volumeName': 'emptydir-vol-1' }, { 'mountPath': '/mnt/my_vol_2', 'volumeName': 'emptydir-vol-2' }] } } } } } } def test_respects_other_configuration(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.empty_dir_mount( task, volume_name='emptydir-vol-1', mount_path='/mnt/my_vol_1', medium='Memory', size_limit='1Gi' ) # this should exist too kubernetes.set_image_pull_secrets(task, ['secret-name']) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'emptyDirMounts': [{ 'medium': 'Memory', 'mountPath': '/mnt/my_vol_1', 'sizeLimit': '1Gi', 'volumeName': 'emptydir-vol-1' }], 'imagePullSecret': [{ 'secretName': 'secret-name' }] } } } } } } @dsl.component def comp(): pass
434
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_secret.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestUseSecretAsVolume: def test_use_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='secretpath', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': 'secretpath', 'optional': False }] } } } } } } def test_use_one_optional_true(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='secretpath', optional=True) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': 'secretpath', 'optional': True }] } } } } } } def test_use_one_optional_false(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='secretpath', optional=False) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': 'secretpath', 'optional': False }] } } } } } } def test_use_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name1', mount_path='secretpath1', ) kubernetes.use_secret_as_volume( task, secret_name='secret-name2', mount_path='secretpath2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsVolume': [ { 'secretName': 'secret-name1', 'mountPath': 'secretpath1', 'optional': False }, { 'secretName': 'secret-name2', 'mountPath': 'secretpath2', 'optional': False }, ] } } } } } } def test_preserves_secret_as_env(self): # checks that use_secret_as_volume respects previously set secrets as env @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_env( task, secret_name='secret-name1', secret_key_to_env={'password': 'SECRET_VAR'}, ) kubernetes.use_secret_as_volume( task, secret_name='secret-name2', mount_path='secretpath2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsEnv': [{ 'secretName': 'secret-name1', 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' }] }], 'secretAsVolume': [{ 'secretName': 'secret-name2', 'mountPath': 'secretpath2', 'optional': False },] } } } } } } def test_alongside_pvc_mount(self): # checks that use_secret_as_volume respects previously set pvc @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='secretpath', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': 'secretpath', 'optional': False }] } } } } } } class TestUseSecretAsEnv: def test_use_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_env( task, secret_name='secret-name', secret_key_to_env={ 'username': 'USERNAME', 'password': 'PASSWORD', }, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsEnv': [{ 'secretName': 'secret-name', 'keyToEnv': [ { 'secretKey': 'username', 'envVar': 'USERNAME' }, { 'secretKey': 'password', 'envVar': 'PASSWORD' }, ] }] } } } } } } def test_use_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_env( task, secret_name='secret-name1', secret_key_to_env={'password1': 'SECRET_VAR1'}, ) kubernetes.use_secret_as_env( task, secret_name='secret-name2', secret_key_to_env={'password2': 'SECRET_VAR2'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsEnv': [ { 'secretName': 'secret-name1', 'keyToEnv': [{ 'secretKey': 'password1', 'envVar': 'SECRET_VAR1' }] }, { 'secretName': 'secret-name2', 'keyToEnv': [{ 'secretKey': 'password2', 'envVar': 'SECRET_VAR2' }] }, ] } } } } } } def test_preserves_secret_as_volume(self): # checks that use_secret_as_env respects previously set secrets as vol @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name2', mount_path='secretpath2', ) kubernetes.use_secret_as_env( task, secret_name='secret-name1', secret_key_to_env={'password': 'SECRET_VAR'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsEnv': [{ 'secretName': 'secret-name1', 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' }] }], 'secretAsVolume': [{ 'secretName': 'secret-name2', 'mountPath': 'secretpath2', 'optional': False },] } } } } } } def test_preserves_pvc_mount(self): # checks that use_secret_as_env respects previously set pvc @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) kubernetes.use_secret_as_env( task, secret_name='secret-name', secret_key_to_env={'password': 'SECRET_VAR'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'secretAsEnv': [{ 'secretName': 'secret-name', 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' }] }] } } } } } } @dsl.component def comp(): pass
435
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_config_map.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestUseConfigMapAsVolume: def test_use_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='cm-name', mount_path='cmpath', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsVolume': [{ 'configMapName': 'cm-name', 'mountPath': 'cmpath', 'optional': False }] } } } } } } def test_use_one_optional_true(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='cm-name', mount_path='cmpath', optional=True) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsVolume': [{ 'configMapName': 'cm-name', 'mountPath': 'cmpath', 'optional': True }] } } } } } } def test_use_one_optional_false(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='cm-name', mount_path='cmpath', optional=False) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsVolume': [{ 'configMapName': 'cm-name', 'mountPath': 'cmpath', 'optional': False }] } } } } } } def test_use_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='cm-name1', mount_path='cmpath1', ) kubernetes.use_config_map_as_volume( task, config_map_name='cm-name2', mount_path='cmpath2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsVolume': [ { 'configMapName': 'cm-name1', 'mountPath': 'cmpath1', 'optional': False }, { 'configMapName': 'cm-name2', 'mountPath': 'cmpath2', 'optional': False }, ] } } } } } } def test_preserves_config_map_as_env(self): # checks that use_config map_as_volume respects previously set config maps as env @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_env( task, config_map_name='cm-name1', config_map_key_to_env={'foo': 'CM_VAR'}, ) kubernetes.use_config_map_as_volume( task, config_map_name='cm-name2', mount_path='cmpath2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsEnv': [{ 'configMapName': 'cm-name1', 'keyToEnv': [{ 'configMapKey': 'foo', 'envVar': 'CM_VAR' }] }], 'configMapAsVolume': [{ 'configMapName': 'cm-name2', 'mountPath': 'cmpath2', 'optional': False },] } } } } } } def test_alongside_pvc_mount(self): # checks that use_config_map_as_volume respects previously set pvc @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) kubernetes.use_config_map_as_volume( task, config_map_name='cm-name', mount_path='cmpath', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'configMapAsVolume': [{ 'configMapName': 'cm-name', 'mountPath': 'cmpath', 'optional': False }] } } } } } } class TestUseConfigMapAsEnv: def test_use_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_env( task, config_map_name='cm-name', config_map_key_to_env={ 'foo': 'FOO', 'bar': 'BAR', }, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsEnv': [{ 'configMapName': 'cm-name', 'keyToEnv': [ { 'configMapKey': 'foo', 'envVar': 'FOO' }, { 'configMapKey': 'bar', 'envVar': 'BAR' }, ] }] } } } } } } def test_use_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_env( task, config_map_name='cm-name1', config_map_key_to_env={'foo1': 'CM_VAR1'}, ) kubernetes.use_config_map_as_env( task, config_map_name='cm-name2', config_map_key_to_env={'foo2': 'CM_VAR2'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsEnv': [ { 'configMapName': 'cm-name1', 'keyToEnv': [{ 'configMapKey': 'foo1', 'envVar': 'CM_VAR1' }] }, { 'configMapName': 'cm-name2', 'keyToEnv': [{ 'configMapKey': 'foo2', 'envVar': 'CM_VAR2' }] }, ] } } } } } } def test_preserves_config_map_as_volume(self): # checks that use_config_map_as_env respects previously set ConfigMaps as vol @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_config_map_as_volume( task, config_map_name='cm-name2', mount_path='cmpath2', ) kubernetes.use_config_map_as_env( task, config_map_name='cm-name1', config_map_key_to_env={'foo': 'CM_VAR'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'configMapAsEnv': [{ 'configMapName': 'cm-name1', 'keyToEnv': [{ 'configMapKey': 'foo', 'envVar': 'CM_VAR' }] }], 'configMapAsVolume': [{ 'configMapName': 'cm-name2', 'mountPath': 'cmpath2', 'optional': False },] } } } } } } def test_preserves_pvc_mount(self): # checks that use_config_map_as_env respects previously set pvc @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) kubernetes.use_config_map_as_env( task, config_map_name='cm-name', config_map_key_to_env={'foo': 'CM_VAR'}, ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'configMapAsEnv': [{ 'configMapName': 'cm-name', 'keyToEnv': [{ 'configMapKey': 'foo', 'envVar': 'CM_VAR' }] }] } } } } } } @dsl.component def comp(): pass
436
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_image_pull_secrets.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestImagePullSecret: def test_add_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_secrets(task, ['secret-name']) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'imagePullSecret': [{ 'secretName': 'secret-name' }] } } } } } } def test_add_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_secrets(task, ['secret-name1', 'secret-name2']) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'imagePullSecret': [ { 'secretName': 'secret-name1' }, { 'secretName': 'secret-name2' }, ] } } } } } } def test_respects_other_configuration(self): @dsl.pipeline def my_pipeline(): task = comp() # Load the secret as a volume kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='/mnt/my_vol') # Set image pull secrets for a task using secret names kubernetes.set_image_pull_secrets(task, ['secret-name']) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': '/mnt/my_vol', 'optional': False }], 'imagePullSecret': [{ 'secretName': 'secret-name' }] } } } } } } @dsl.component def comp(): pass
437
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_pod_metadata.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestPodMetadata: def test_add_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_pod_label( task, label_key='kubeflow.com/kfp', label_value='pipeline-node', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'podMetadata': { 'labels': { 'kubeflow.com/kfp': 'pipeline-node' } } } } } } } } def test_add_same_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_pod_label( task, label_key='kubeflow.com/kfp', label_value='pipeline-node', ) kubernetes.add_pod_label( task, label_key='kubeflow.com/kfp', label_value='pipeline-node2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'podMetadata': { 'labels': { 'kubeflow.com/kfp': 'pipeline-node2' } } } } } } } } def test_add_two_and_mix(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_pod_label( task, label_key='kubeflow.com/kfp', label_value='pipeline-node', ) kubernetes.add_pod_label( task, label_key='kubeflow.com/common', label_value='test', ) kubernetes.add_pod_annotation( task, annotation_key='run_id', annotation_value='123456', ) kubernetes.add_pod_annotation( task, annotation_key='experiment_id', annotation_value='234567', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'podMetadata': { 'annotations': { 'run_id': '123456', 'experiment_id': '234567' }, 'labels': { 'kubeflow.com/kfp': 'pipeline-node', 'kubeflow.com/common': 'test' } } } } } } } } def test_respects_other_configuration(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='my-secret', mount_path='/mnt/my_vol') kubernetes.add_pod_annotation( task, annotation_key='run_id', annotation_value='123456', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'podMetadata': { 'annotations': { 'run_id': '123456' } }, 'secretAsVolume': [{ 'secretName': 'my-secret', 'mountPath': '/mnt/my_vol', 'optional': False }] } } } } } } @dsl.component def comp(): pass
438
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_timeout.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes import pytest class TestTimeout: def test_timeout(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_timeout( task, seconds=20 ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'activeDeadlineSeconds': '20' } } } } } } def test_reset_timeout(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_timeout( task, seconds=20 ) kubernetes.set_timeout( task, seconds=0 ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { } } } } } } def test_bad_value_timeout(self): with pytest.raises( ValueError, match=r'Argument for "seconds" must be an integer greater or equals to 0. Got invalid input: -20.', ): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_timeout( task, seconds=-20 ) @dsl.component def comp(): pass
439
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_field.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestUseFieldPathAsEnv: def test_use_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_field_path_as_env( task, env_name="KFP_RUN_NAME", field_path="metadata.annotations['pipelines.kubeflow.org/run_name']" ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'fieldPathAsEnv': [{ 'name': 'KFP_RUN_NAME', 'fieldPath': 'metadata.annotations[\'pipelines.kubeflow.org/run_name\']' }] } } } } } } def test_use_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_field_path_as_env( task, env_name="KFP_RUN_NAME", field_path="metadata.annotations['pipelines.kubeflow.org/run_name']" ) kubernetes.use_field_path_as_env( task, env_name="POD_NAME", field_path="metadata.name" ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'fieldPathAsEnv': [{ 'name': 'KFP_RUN_NAME', 'fieldPath': 'metadata.annotations[\'pipelines.kubeflow.org/run_name\']' }, { 'name': 'POD_NAME', 'fieldPath': 'metadata.name' }] } } } } } } @dsl.component def comp(): pass
440
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_image_pull_policy.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestImagePullPolicy: def test_always(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_policy(task, 'Always') assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'imagePullPolicy': 'Always' } } } } } } def test_if_not_present(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_policy(task, 'IfNotPresent') assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'imagePullPolicy': 'IfNotPresent' } } } } } } def test_never(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.set_image_pull_policy(task, 'Never') assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'imagePullPolicy': 'Never' } } } } } } @dsl.component def comp(): pass
441
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_node_selector.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes class TestNodeSelector: def test_add_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_node_selector( task, label_key='cloud.google.com/gke-accelerator', label_value='nvidia-tesla-p4', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'nodeSelector': { 'labels': { 'cloud.google.com/gke-accelerator': 'nvidia-tesla-p4' } } } } } } } } def test_add_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_node_selector( task, label_key='cloud.google.com/gke-accelerator', label_value='nvidia-tesla-p4', ) kubernetes.add_node_selector( task, label_key='other_label_key', label_value='other_label_value', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'nodeSelector': { 'labels': { 'cloud.google.com/gke-accelerator': 'nvidia-tesla-p4', 'other_label_key': 'other_label_value' }, } } } } } } } def test_respects_other_configuration(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='my-secret', mount_path='/mnt/my_vol') kubernetes.add_node_selector( task, label_key='cloud.google.com/gke-accelerator', label_value='nvidia-tesla-p4', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'nodeSelector': { 'labels': { 'cloud.google.com/gke-accelerator': 'nvidia-tesla-p4' } }, 'secretAsVolume': [{ 'secretName': 'my-secret', 'mountPath': '/mnt/my_vol', 'optional': False }] } } } } } } @dsl.component def comp(): pass
442
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_tolerations.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import compiler from kfp import dsl from kfp import kubernetes class TestTolerations: def test_add_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_toleration( task, key='key1', operator='Equal', value='value1', effect='NoSchedule', ) compiler.Compiler().compile( pipeline_func=my_pipeline, package_path='my_pipeline.yaml') assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'tolerations': [{ 'key': 'key1', 'operator': 'Equal', 'value': 'value1', 'effect': 'NoSchedule', }] } } } } } } def test_add_one_with_toleration_seconds(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_toleration( task, key='key1', operator='Equal', value='value1', effect='NoExecute', toleration_seconds=10, ) compiler.Compiler().compile( pipeline_func=my_pipeline, package_path='my_pipeline.yaml') assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'tolerations': [{ 'key': 'key1', 'operator': 'Equal', 'value': 'value1', 'effect': 'NoExecute', 'tolerationSeconds': '10', }] } } } } } } def test_add_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_toleration( task, key='key1', operator='Equal', value='value1', ) kubernetes.add_toleration( task, key='key2', operator='Equal', value='value2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'tolerations': [ { 'key': 'key1', 'operator': 'Equal', 'value': 'value1', }, { 'key': 'key2', 'operator': 'Equal', 'value': 'value2', }, ] } } } } } } def test_respects_other_configuration(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='my-secret', mount_path='/mnt/my_vol') kubernetes.add_toleration( task, key='key1', operator='Equal', value='value1', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'tolerations': [{ 'key': 'key1', 'operator': 'Equal', 'value': 'value1', },], 'secretAsVolume': [{ 'secretName': 'my-secret', 'mountPath': '/mnt/my_vol', 'optional': False },], }, } } } } } @dsl.component def comp(): pass
443
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/test
kubeflow_public_repos/pipelines/kubernetes_platform/python/test/unit/test_volume.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp import dsl from kfp import kubernetes import pytest class TestMountPVC: def test_mount_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }] } } } } } } def test_mount_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path1', ) kubernetes.mount_pvc( task, pvc_name='other-pvc-name', mount_path='path2', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [ { 'constant': 'pvc-name', 'mountPath': 'path1' }, { 'constant': 'other-pvc-name', 'mountPath': 'path2' }, ] } } } } } } def test_mount_preserves_secret_as_env(self): # checks that mount_pvc respects previously set secrets @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_env( task, secret_name='secret-name', secret_key_to_env={'password': 'SECRET_VAR'}, ) kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'secretAsEnv': [{ 'secretName': 'secret-name', 'keyToEnv': [{ 'secretKey': 'password', 'envVar': 'SECRET_VAR' }] }] } } } } } } def test_mount_preserves_secret_as_vol(self): # checks that mount_pvc respects previously set secrets @dsl.pipeline def my_pipeline(): task = comp() kubernetes.use_secret_as_volume( task, secret_name='secret-name', mount_path='secretpath', ) kubernetes.mount_pvc( task, pvc_name='pvc-name', mount_path='path', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'pvcMount': [{ 'constant': 'pvc-name', 'mountPath': 'path' }], 'secretAsVolume': [{ 'secretName': 'secret-name', 'mountPath': 'secretpath', 'optional': False }] } } } } } } def test_illegal_pvc_name(self): @dsl.component def identity(string: str) -> str: return string with pytest.raises( ValueError, match=r'Argument for \'pvc_name\' must be an instance of str or PipelineChannel\. Got unknown input type: <class \'int\'>\.', ): @dsl.pipeline def my_pipeline(string: str = 'string'): op1 = kubernetes.mount_pvc( identity(string=string), pvc_name=1, mount_path='/path', ) class TestGenericEphemeralVolume: def test_mount_one(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_ephemeral_volume( task, volume_name='pvc-name', mount_path='path', access_modes=['ReadWriteOnce'], size='5Gi', ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'genericEphemeralVolume': [{ 'volumeName': 'pvc-name', 'mountPath': 'path', 'accessModes': ['ReadWriteOnce'], 'defaultStorageClass': True, 'size': '5Gi', }] } } } } } } def test_mount_two(self): @dsl.pipeline def my_pipeline(): task = comp() kubernetes.add_ephemeral_volume( task, volume_name='pvc-name', mount_path='path1', access_modes=['ReadWriteOnce'], size='5Gi', ) kubernetes.add_ephemeral_volume( task, volume_name='other-pvc-name', mount_path='path2', access_modes=['ReadWriteMany'], size='10Ti', storage_class_name='gp2', labels={ 'label1': 'l1', }, annotations={ 'annotation1': 'a1', } ) assert json_format.MessageToDict(my_pipeline.platform_spec) == { 'platforms': { 'kubernetes': { 'deploymentSpec': { 'executors': { 'exec-comp': { 'genericEphemeralVolume': [ { 'volumeName': 'pvc-name', 'mountPath': 'path1', 'accessModes': ['ReadWriteOnce'], 'defaultStorageClass': True, 'size': '5Gi', }, { 'volumeName': 'other-pvc-name', 'mountPath': 'path2', 'accessModes': ['ReadWriteMany'], 'size': '10Ti', 'storageClassName': 'gp2', 'metadata': { 'labels': {'label1': 'l1'}, 'annotations': {'annotation1': 'a1'}, }, }, ] } } } } } } @dsl.component def comp(): pass
444
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/secret.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Dict from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def use_secret_as_env( task: PipelineTask, secret_name: str, secret_key_to_env: Dict[str, str], ) -> PipelineTask: """Use a Kubernetes Secret as an environment variable as described by the `Kubernetes documentation https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables `_. Args: task: Pipeline task. secret_name: Name of the Secret. secret_key_to_env: Dictionary of Secret data key to environment variable name. For example, ``{'password': 'PASSWORD'}`` sets the data of the Secret's password field to the environment variable ``PASSWORD``. Returns: Task object with updated secret configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) key_to_env = [ pb.SecretAsEnv.SecretKeyToEnvMap( secret_key=secret_key, env_var=env_var, ) for secret_key, env_var in secret_key_to_env.items() ] secret_as_env = pb.SecretAsEnv( secret_name=secret_name, key_to_env=key_to_env, ) msg.secret_as_env.append(secret_as_env) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task def use_secret_as_volume( task: PipelineTask, secret_name: str, mount_path: str, optional: bool = False, ) -> PipelineTask: """Use a Kubernetes Secret by mounting its data to the task's container as described by the `Kubernetes documentation <https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-files-from-a-pod>`_. Args: task: Pipeline task. secret_name: Name of the Secret. mount_path: Path to which to mount the Secret data. optional: Optional field specifying whether the Secret must be defined. Returns: Task object with updated secret configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) secret_as_vol = pb.SecretAsVolume( secret_name=secret_name, mount_path=mount_path, optional=optional, ) msg.secret_as_volume.append(secret_as_vol) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
445
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/empty_dir.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Optional from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def empty_dir_mount( task: PipelineTask, volume_name: str, mount_path: str, medium: Optional[str] = None, size_limit: Optional[str] = None, ) -> PipelineTask: """Mount an EmptyDir volume to the task's container. Args: task: Pipeline task. volume_name: Name of the EmptyDir volume. mount_path: Path within the container at which the EmptyDir should be mounted. medium: Storage medium to back the EmptyDir. Must be one of `Memory` or `HugePages`. Defaults to `None`. size_limit: Maximum size of the EmptyDir. For example, `5Gi`. Defaults to `None`. Returns: Task object with updated EmptyDir mount configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) empty_dir_mount = pb.EmptyDirMount( volume_name=volume_name, mount_path=mount_path, medium=medium, size_limit=size_limit, ) msg.empty_dir_mounts.append(empty_dir_mount) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
446
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/image.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import List from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def set_image_pull_secrets( task: PipelineTask, secret_names: List[str], ) -> PipelineTask: """Set image pull secrets for a Kubernetes task. Args: task: Pipeline task. secret_names: List of image pull secret names. Returns: Task object with updated image pull secret configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) # Assuming secret_names is a list of strings image_pull_secret = [ pb.ImagePullSecret(secret_name=secret_name) for secret_name in secret_names ] msg.image_pull_secret.extend(image_pull_secret) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task def set_image_pull_policy(task: PipelineTask, policy: str) -> PipelineTask: """Set image pull policy for the container. Args: task: Pipeline task. policy: One of `Always`, `Never`, `IfNotPresent`. Returns: Task object with an added ImagePullPolicy specification. """ if policy not in ['Always', 'Never', 'IfNotPresent']: raise ValueError( 'Invalid imagePullPolicy. Must be one of `Always`, `Never`, `IfNotPresent`.' ) msg = common.get_existing_kubernetes_config_as_message(task) msg.image_pull_policy = policy task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
447
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/__init__.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '1.3.0' __all__ = [ 'add_ephemeral_volume', 'add_node_selector', 'add_pod_annotation', 'add_pod_label', 'add_toleration', 'CreatePVC', 'DeletePVC', 'empty_dir_mount', 'mount_pvc', 'set_image_pull_policy', 'use_field_path_as_env', 'set_image_pull_secrets', 'set_timeout', 'use_config_map_as_env', 'use_config_map_as_volume', 'use_secret_as_env', 'use_secret_as_volume', ] from kfp.kubernetes.config_map import use_config_map_as_env from kfp.kubernetes.config_map import use_config_map_as_volume from kfp.kubernetes.empty_dir import empty_dir_mount from kfp.kubernetes.field import use_field_path_as_env from kfp.kubernetes.image import set_image_pull_policy from kfp.kubernetes.image import set_image_pull_secrets from kfp.kubernetes.node_selector import add_node_selector from kfp.kubernetes.pod_metadata import add_pod_annotation from kfp.kubernetes.pod_metadata import add_pod_label from kfp.kubernetes.secret import use_secret_as_env from kfp.kubernetes.secret import use_secret_as_volume from kfp.kubernetes.timeout import set_timeout from kfp.kubernetes.toleration import add_toleration from kfp.kubernetes.volume import add_ephemeral_volume from kfp.kubernetes.volume import CreatePVC from kfp.kubernetes.volume import DeletePVC from kfp.kubernetes.volume import mount_pvc
448
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/timeout.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common def set_timeout( task: PipelineTask, seconds: int, ) -> PipelineTask: """Add timeout to the task Pod's `active_deadline_seconds <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.26/#podspec-v1-core>`_. Timeout an integer greater than 0, corresponding to the podspec active_deadline_seconds <https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.26/#podspec-v1-core`_ field. Integer 0 means removing the timeout fields from previous functions. Args: task: Pipeline task. seconds: Value of the active_deadline_seconds. Returns: Task object with an updated active_deadline_seconds. """ msg = common.get_existing_kubernetes_config_as_message(task) if seconds >= 0: msg.active_deadline_seconds = seconds else: raise ValueError( f'Argument for "seconds" must be an integer greater or equals to 0. Got invalid input: {seconds}. ' ) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
449
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/field.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def use_field_path_as_env( task: PipelineTask, env_name: str, field_path: str, ) -> PipelineTask: """Use a Kubernetes Field Path as an environment variable as described in https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information Args: task: Pipeline task. env_name: Name of the enviornment variable. field_path: Kubernetes field path to expose as the enviornment variable. Returns: Task object with updated field path as the enviornment variable. """ msg = common.get_existing_kubernetes_config_as_message(task) field_path_as_env = pb.FieldPathAsEnv( name=env_name, field_path=field_path, ) msg.field_path_as_env.append(field_path_as_env) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
450
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/pod_metadata.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common def add_pod_label( task: PipelineTask, label_key: str, label_value: str, ) -> PipelineTask: """Add a label to the task Pod's `metadata <https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#Pod>`_. Each label is a key-value pair, corresponding to the metadata's `ObjectMeta <https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/object-meta/#ObjectMeta`_ field. Args: task: Pipeline task. label_key: Key of the metadata label. label_value: Value of the metadata label. Returns: Task object with an added metadata label. """ msg = common.get_existing_kubernetes_config_as_message(task) msg.pod_metadata.labels.update({label_key: label_value}) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task def add_pod_annotation( task: PipelineTask, annotation_key: str, annotation_value: str, ) -> PipelineTask: """Add an annotation to the task Pod's `metadata <https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#Pod>`_. Each annotation is a key-value pair, corresponding to the metadata's `ObjectMeta <https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/object-meta/#ObjectMeta`_ field. Args: task: Pipeline task. annotation_key: Key of the metadata annotation. annotation_value: Value of the metadata annotation. Returns: Task object with an added metadata annotation. """ msg = common.get_existing_kubernetes_config_as_message(task) msg.pod_metadata.annotations.update({annotation_key: annotation_value}) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
451
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/node_selector.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common def add_node_selector( task: PipelineTask, label_key: str, label_value: str, ) -> PipelineTask: """Add a constraint to the task Pod's `nodeSelector <https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#nodeselector>`_. Each constraint is a key-value pair, corresponding to the PodSpec's `nodeSelector <https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#scheduling>`_ field. For the task's Pod to be eligible to run on a node, the node's labels must satisfy the constraint. Args: task: Pipeline task. label_key: Key of the nodeSelector label. label_value: Value of the nodeSelector label. Returns: Task object with an added nodeSelector constraint. """ msg = common.get_existing_kubernetes_config_as_message(task) msg.node_selector.labels.update({label_key: label_value}) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
452
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/common.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.protobuf import json_format from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def get_existing_kubernetes_config_as_message( task: 'PipelineTask') -> pb.KubernetesExecutorConfig: cur_k8_config_dict = task.platform_config.get('kubernetes', {}) k8_config_msg = pb.KubernetesExecutorConfig() return json_format.ParseDict(cur_k8_config_dict, k8_config_msg)
453
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/config_map.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Dict from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb def use_config_map_as_env( task: PipelineTask, config_map_name: str, config_map_key_to_env: Dict[str, str], ) -> PipelineTask: """Use a Kubernetes ConfigMap as an environment variable as described by the `Kubernetes documentation https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data` _. Args: task: Pipeline task. config_map_name: Name of the ConfigMap. config_map_key_to_env: Dictionary of ConfigMap key to environment variable name. For example, ``{'foo': 'FOO'}`` sets the value of the ConfigMap's foo field to the environment variable ``FOO``. Returns: Task object with updated ConfigMap configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) key_to_env = [ pb.ConfigMapAsEnv.ConfigMapKeyToEnvMap( config_map_key=config_map_key, env_var=env_var, ) for config_map_key, env_var in config_map_key_to_env.items() ] config_map_as_env = pb.ConfigMapAsEnv( config_map_name=config_map_name, key_to_env=key_to_env, ) msg.config_map_as_env.append(config_map_as_env) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task def use_config_map_as_volume( task: PipelineTask, config_map_name: str, mount_path: str, optional: bool = False, ) -> PipelineTask: """Use a Kubernetes ConfigMap by mounting its data to the task's container as described by the `Kubernetes documentation <https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#add-configmap-data-to-a-volume>`_. Args: task: Pipeline task. config_map_name: Name of the ConfigMap. mount_path: Path to which to mount the ConfigMap data. optional: Optional field specifying whether the ConfigMap must be defined. Returns: Task object with updated ConfigMap configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) config_map_as_vol = pb.ConfigMapAsVolume( config_map_name=config_map_name, mount_path=mount_path, optional=optional, ) msg.config_map_as_volume.append(config_map_as_vol) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task
454
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/toleration.py
# Copyright 2024 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Optional from google.protobuf import json_format from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb try: from typing import Literal except ImportError: from typing_extensions import Literal def add_toleration( task: PipelineTask, key: Optional[str] = None, operator: Optional[Literal["Equal", "Exists"]] = None, value: Optional[str] = None, effect: Optional[Literal["NoExecute", "NoSchedule", "PreferNoSchedule"]] = None, toleration_seconds: Optional[int] = None, ): """Add a `toleration<https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/>`_. to a task. Args: task: Pipeline task. key: key is the taint key that the toleration applies to. Empty means match all taint keys. If the key is empty, operator must be Exists; this combination means to match all values and all keys. operator: operator represents a key's relationship to the value. Valid operators are Exists and Equal. Defaults to Equal. Exists is equivalent to wildcard for value, so that a pod can tolerate all taints of a particular category. value: value is the taint value the toleration matches to. If the operator is Exists, the value should be empty, otherwise just a regular string. effect: effect indicates the taint effect to match. Empty means match all taint effects. When specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute. toleration_seconds: toleration_seconds represents the period of time the toleration (which must be of effect NoExecute, otherwise this field is ignored) tolerates the taint. By default, it is not set, which means tolerate the taint forever (do not evict). Zero and negative values will be treated as 0 (evict immediately) by the system. Returns: Task object with added toleration. """ msg = common.get_existing_kubernetes_config_as_message(task) msg.tolerations.append( pb.Toleration( key=key, operator=operator, value=value, effect=effect, toleration_seconds=toleration_seconds, ) ) task.platform_config["kubernetes"] = json_format.MessageToDict(msg) return task
455
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp
kubeflow_public_repos/pipelines/kubernetes_platform/python/kfp/kubernetes/volume.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import Dict, List, Optional, Union from google.protobuf import json_format from google.protobuf import message from kfp import dsl from kfp.dsl import PipelineTask from kfp.kubernetes import common from kfp.kubernetes import kubernetes_executor_config_pb2 as pb @dsl.container_component def CreatePVC( name: dsl.OutputPath(str), access_modes: List[str], size: str, pvc_name: Optional[str] = None, pvc_name_suffix: Optional[str] = None, storage_class_name: Optional[str] = '', volume_name: Optional[str] = None, annotations: Optional[Dict[str, str]] = None, ): """Create a PersistentVolumeClaim, which can be used by downstream tasks. See `PersistentVolume <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistent-volumes>`_ and `PersistentVolumeClaim <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims>`_ documentation for more information about the component input parameters. Args: access_modes: AccessModes to request for the provisioned PVC. May be one or more of ``'ReadWriteOnce'``, ``'ReadOnlyMany'``, ``'ReadWriteMany'``, or ``'ReadWriteOncePod'``. Corresponds to `PersistentVolumeClaim.spec.accessModes <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`_. size: The size of storage requested by the PVC that will be provisioned. For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. pvc_name: Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided. pvc_name_suffix: Prefix to use for a dynamically generated name, which will take the form ``<argo-workflow-name>-<pvc_name_suffix>``. Only one of ``pvc_name`` and ``pvc_name_suffix`` can be provided. storage_class_name: Name of StorageClass from which to provision the PV to back the PVC. ``None`` indicates to use the cluster's default storage_class_name. Set to ``''`` for a statically specified PVC. volume_name: Pre-existing PersistentVolume that should back the provisioned PersistentVolumeClaim. Used for statically specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. annotations: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaim>`_. Returns: ``name: str`` \n\t\t\tName of the generated PVC. """ return dsl.ContainerSpec(image='argostub/createpvc') def mount_pvc( task: PipelineTask, pvc_name: Union[str, 'PipelineChannel'], mount_path: str, ) -> PipelineTask: """Mount a PersistentVolumeClaim to the task's container. Args: task: Pipeline task. pvc_name: Name of the PVC to mount. Supports passing a runtime-generated name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. mount_path: Path to which the PVC should be mounted as a volume. Returns: Task object with updated PVC mount configuration. """ msg = common.get_existing_kubernetes_config_as_message(task) pvc_mount = pb.PvcMount(mount_path=mount_path) pvc_name_from_task = _assign_pvc_name_to_msg(pvc_mount, pvc_name) if pvc_name_from_task: task.after(pvc_name.task) msg.pvc_mount.append(pvc_mount) task.platform_config['kubernetes'] = json_format.MessageToDict(msg) return task @dsl.container_component def DeletePVC(pvc_name: str): """Delete a PersistentVolumeClaim. Args: pvc_name: Name of the PVC to delete. Supports passing a runtime-generated name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. """ return dsl.ContainerSpec(image='argostub/deletepvc') def _assign_pvc_name_to_msg( msg: message.Message, pvc_name: Union[str, 'PipelineChannel'], ) -> bool: """Assigns pvc_name to the msg's pvc_reference oneof. Returns True if pvc_name is an upstream task output. Else, returns False.""" if isinstance(pvc_name, str): msg.constant = pvc_name return False elif hasattr(pvc_name, 'task_name'): if pvc_name.task_name is None: msg.component_input_parameter = pvc_name.name return False else: msg.task_output_parameter.producer_task = pvc_name.task_name msg.task_output_parameter.output_parameter_key = pvc_name.name return True else: raise ValueError( f'Argument for {"pvc_name"!r} must be an instance of str or PipelineChannel. Got unknown input type: {type(pvc_name)!r}. ' ) def add_ephemeral_volume( task: PipelineTask, volume_name: str, mount_path: str, access_modes: List[str], size: str, storage_class_name: Optional[str] = None, labels: Dict[str, str] = None, annotations: Dict[str, str] = None, ): """Add a `generic ephemeral volume <https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes>`_. to a task. Args: task: Pipeline task. volume_name: name to be given to the created ephemeral volume. Corresponds to Pod.spec.volumes[*].name mount_path: local path in the main container where the PVC should be mounted as a volume access_modes: AccessModes to request for the provisioned PVC. May be one or more of ``'ReadWriteOnce'``, ``'ReadOnlyMany'``, ``'ReadWriteMany'``, or``'ReadWriteOncePod'``. Corresponds to `Pod.spec.volumes[*].ephemeral.volumeClaimTemplate.spec.accessModes <https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes>`_. size: The size of storage requested by the PVC that will be provisioned. For example, ``'5Gi'``. Corresponds to `Pod.spec.volumes[*].ephemeral.volumeClaimTemplate.spec.resources.requests.storage <https://kubernetes.io/docs/reference/kubernetes-api/config-and-storage-resources/persistent-volume-claim-v1/#PersistentVolumeClaimSpec>`_. storage_class_name: Name of StorageClass from which to provision the PV to back the PVC. ``None`` indicates to use the cluster's default storage_class_name. labels: The labels to attach to the created PVC. Corresponds to `Pod.spec.volumes[*].ephemeral.volumeClaimTemplate.metadata.labels annotations: The annotation to attach to the created PVC. Corresponds to `Pod.spec.volumes[*].ephemeral.volumeClaimTemplate.metadata.annotations Returns: Task object with added toleration. """ msg = common.get_existing_kubernetes_config_as_message(task) msg.generic_ephemeral_volume.append( pb.GenericEphemeralVolume( volume_name=volume_name, mount_path=mount_path, access_modes=access_modes, size=size, default_storage_class=storage_class_name is None, storage_class_name=storage_class_name, metadata=pb.PodMetadata( annotations=annotations or {}, labels=labels or {}, ) if annotations or labels else None, ) ) task.platform_config["kubernetes"] = json_format.MessageToDict(msg) return task
456
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/conf.py
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config import re from kfp import dsl # preserve function docstrings for components by setting component decorators to passthrough decorators # also enables autodoc to document the components as functions without using the autodata directive (https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-autodata) def container_component_decorator(func): return func def component_decorator(*args, **kwargs): def decorator(func): return func return decorator dsl.component = component_decorator dsl.container_component = container_component_decorator # -- Project information ----------------------------------------------------- project = 'Kubeflow Pipelines' copyright = '2023, The Kubeflow Authors' author = 'The Kubeflow Authors' # The short X.Y version version = '' # The full version, including alpha/beta/rc tags release = '' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'sphinx_click', 'm2r2', 'sphinx_immaterial', 'autodocsumm', ] autodoc_member_order = 'bysource' autodoc_default_options = { 'members': True, 'imported-members': True, 'undoc-members': True, 'show-inheritance': False, 'autosummary': True, } html_theme = 'sphinx_immaterial' html_title = 'KFP SDK kfp-kubernetes API Reference' html_static_path = ['_static'] html_css_files = ['custom.css'] html_logo = '_static/kubeflow.png' html_favicon = '_static/favicon.ico' html_theme_options = { 'icon': { 'repo': 'fontawesome/brands/github', }, 'repo_url': 'https://github.com/kubeflow/pipelines/tree/master/kubernetes_platform', 'repo_name': 'pipelines', 'repo_type': 'github', 'edit_uri': 'blob/master/docs', 'globaltoc_collapse': False, 'features': [ 'navigation.expand', # "navigation.tabs", # "toc.integrate", 'navigation.sections', # "navigation.instant", # "header.autohide", 'navigation.top', # "navigation.tracking", 'search.highlight', 'search.share', 'toc.follow', 'toc.sticky', ], 'palette': [ { 'media': '(prefers-color-scheme: dark)', 'scheme': 'slate', 'primary': 'kfpblue', # "accent": "lime", 'toggle': { 'icon': 'material/lightbulb', 'name': 'Switch to light mode', }, }, { 'media': '(prefers-color-scheme: light)', 'scheme': 'default', 'primary': 'kfpblue', # "accent": "light-blue", 'toggle': { 'icon': 'material/lightbulb-outline', 'name': 'Switch to dark mode', }, }, ], 'font': { 'text': 'Open Sans' }, 'version_dropdown': True, 'version_info': [ { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-1.3.0/', 'title': '1.3.0', 'aliases': ['stable'], }, { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-1.2.0/', 'title': '1.2.0', 'aliases': [], }, { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-1.1.0/', 'title': '1.1.0', 'aliases': [], }, { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-1.0.0/', 'title': '1.0.0', 'aliases': [], }, { 'version': 'https://kfp-kubernetes.readthedocs.io/en/kfp-kubernetes-0.0.1/', 'title': '0.0.1', 'aliases': [], }, ], # "toc_title_is_page_title": True, } # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'KfpKubernetesdoc' # TODO: align with GCPC representation of components (in particular, OutputPath and Output[]) def strip_outputs_from_signature(app, what, name, obj, options, signature, return_annotation): if signature is not None: signature = re.sub( r'[0-9a-zA-Z]+: <kfp\.components\.types\.type_annotations\.OutputPath object at 0x[0-9a-fA-F]+>?,?\s', '', signature) return signature, return_annotation def setup(app): app.connect('autodoc-process-signature', strip_outputs_from_signature)
457
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/Makefile
# Copyright 2023 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
458
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/requirements.txt
autodocsumm==0.2.9 sphinx==5.0.2 sphinx-click==4.3.0 sphinx-immaterial==0.9.0 sphinx-rtd-theme==1.0.0 m2r2==0.3.2
459
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/make.bat
REM Copyright 2019 The Kubeflow Authors REM REM Licensed under the Apache License, Version 2.0 (the "License"); REM you may not use this file except in compliance with the License. REM You may obtain a copy of the License at REM REM http://www.apache.org/licenses/LICENSE-2.0 REM REM Unless required by applicable law or agreed to in writing, software REM distributed under the License is distributed on an "AS IS" BASIS, REM WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. REM See the License for the specific language governing permissions and REM limitations under the License. @ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd
460
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/.readthedocs.yml
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 sphinx: configuration: kubernetes_platform/python/docs/conf.py build: os: ubuntu-22.04 tools: python: "3.9" python: install: - requirements: kubernetes_platform/python/docs/requirements.txt - method: pip path: kubernetes_platform/python/
461
0
kubeflow_public_repos/pipelines/kubernetes_platform/python
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/index.rst
.. mdinclude:: ../README.md .. toctree:: :caption: Contents :hidden: Home <self> API Reference <source/api> Source Code <https://github.com/kubeflow/pipelines/tree/master/kubernetes_platform/python>
462
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/source/api.rst
API Reference ========================== .. toctree:: :maxdepth: 1 kubernetes
463
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/source/kubernetes.rst
kfp.kubernetes ========================== .. automodule:: kfp.kubernetes
464
0
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs
kubeflow_public_repos/pipelines/kubernetes_platform/python/docs/_static/custom.css
[data-md-color-primary=kfpblue]{--md-primary-fg-color:#4279f4;--md-primary-fg-color--light:##4279f4;--md-primary-fg-color--dark:##4279f4;--md-primary-bg-color:#fff;--md-primary-bg-color--light:hsla(0,0%,100%,.7)}
465
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/conf.py
# Copyright 2019-2022 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config import functools import os import sys from typing import List, Optional import sphinx from sphinx import application # noqa # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../sdk/python')) # -- Project information ----------------------------------------------------- project = 'Kubeflow Pipelines' copyright = '2022, The Kubeflow Authors' author = 'The Kubeflow Authors' # The short X.Y version version = '' # The full version, including alpha/beta/rc tags release = '' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'sphinx_click', 'm2r2', 'sphinx_immaterial', 'autodocsumm', ] autodoc_member_order = 'bysource' autodoc_default_options = { 'members': True, 'imported-members': True, 'undoc-members': True, 'show-inheritance': False, 'autosummary': True, } html_theme = 'sphinx_immaterial' html_title = 'KFP SDK API Reference' html_static_path = ['_static'] html_css_files = ['custom.css'] html_logo = '_static/kubeflow.png' html_favicon = '_static/favicon.ico' html_theme_options = { 'icon': { 'repo': 'fontawesome/brands/github', }, 'repo_url': 'https://github.com/kubeflow/pipelines/', 'repo_name': 'pipelines', 'repo_type': 'github', 'edit_uri': 'blob/master/docs', 'globaltoc_collapse': False, 'features': [ 'navigation.expand', # "navigation.tabs", # "toc.integrate", 'navigation.sections', # "navigation.instant", # "header.autohide", 'navigation.top', # "navigation.tracking", 'search.highlight', 'search.share', 'toc.follow', 'toc.sticky', ], 'palette': [ { 'media': '(prefers-color-scheme: dark)', 'scheme': 'slate', 'primary': 'kfpblue', # "accent": "lime", 'toggle': { 'icon': 'material/lightbulb', 'name': 'Switch to light mode', }, }, { 'media': '(prefers-color-scheme: light)', 'scheme': 'default', 'primary': 'kfpblue', # "accent": "light-blue", 'toggle': { 'icon': 'material/lightbulb-outline', 'name': 'Switch to dark mode', }, }, ], 'font': { 'text': 'Open Sans' }, 'version_dropdown': True, 'version_json': 'https://raw.githubusercontent.com/kubeflow/pipelines/master/docs/versions.json', # "toc_title_is_page_title": True, } # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'KubeflowPipelinesdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'KubeflowPipelines.tex', 'Kubeflow Pipelines Documentation', 'Google', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, 'kubeflowpipelines', 'Kubeflow Pipelines Documentation', [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'KubeflowPipelines', 'Kubeflow Pipelines Documentation', author, 'KubeflowPipelines', 'Kubeflow Pipelines is a platform for building and deploying portable, scalable machine learning workflows based on Docker containers within the Kubeflow project.', ''), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # # -- Extension configuration ------------------------------------------------- readme_path = os.path.join( os.path.abspath(os.path.dirname(os.path.dirname(__file__))), 'sdk', 'python', 'README.md') def trim_header_of_readme(path: str) -> List[str]: with open(path, 'r') as f: contents = f.readlines() with open(path, 'w') as f: f.writelines(contents[1:]) return contents def re_attach_header_of_readme(path: str, contents: List[str]) -> None: with open(path, 'w') as f: f.writelines(contents) original_readme_contents = trim_header_of_readme(readme_path) re_attach_header_of_readme_closure = functools.partial( re_attach_header_of_readme, path=readme_path, contents=original_readme_contents) def re_attach_header_of_readme_hook(app: sphinx.application.Sphinx, exception: Optional[Exception]) -> None: re_attach_header_of_readme_closure() def setup(app: sphinx.application.Sphinx) -> None: app.connect('build-finished', re_attach_header_of_readme_hook)
466
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/Makefile
# Copyright 2019 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
467
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/requirements.txt
autodocsumm==0.2.9 sdk/python sphinx==5.0.2 sphinx-click==4.3.0 sphinx-immaterial==0.9.0 sphinx-rtd-theme==1.0.0 m2r2==0.3.2
468
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/make.bat
REM Copyright 2019 The Kubeflow Authors REM REM Licensed under the Apache License, Version 2.0 (the "License"); REM you may not use this file except in compliance with the License. REM You may obtain a copy of the License at REM REM http://www.apache.org/licenses/LICENSE-2.0 REM REM Unless required by applicable law or agreed to in writing, software REM distributed under the License is distributed on an "AS IS" BASIS, REM WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. REM See the License for the specific language governing permissions and REM limitations under the License. @ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% :end popd
469
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/OWNERS
approvers: - chensun - connor-mccarthy reviewers: - chensun - connor-mccarthy
470
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/versions.json
[ { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.9.0/", "title": "2.9.0", "aliases": [ "stable", "latest" ] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.8.0/", "title": "2.8.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.7.0/", "title": "2.7.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.6.0/", "title": "2.6.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.5.0/", "title": "2.5.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.4.0/", "title": "2.4.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.3.0/", "title": "2.3.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.2.0/", "title": "2.2.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.0.1/", "title": "2.0.1", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.0.0/", "title": "2.0.0", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.0.0-rc.2/", "title": "v2.0.0rc2", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/sdk-2.0.0-rc.1/", "title": "v2.0.0rc1", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b17/", "title": "v2.0.0b17", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b16/", "title": "v2.0.0b16", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b15/", "title": "v2.0.0b15", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b14/", "title": "v2.0.0b14", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b13/", "title": "v2.0.0b13", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b12/", "title": "v2.0.0b12", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b11/", "title": "v2.0.0b11", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b9/", "title": "v2.0.0b9", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b8/", "title": "v2.0.0b8", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b6/", "title": "v2.0.0b6", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b5/", "title": "v2.0.0b5", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/2.0.0b4/", "title": "v2.0.0b4", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.22/", "title": "v1.8.22", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.21/", "title": "v1.8.21", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.20/", "title": "v1.8.20", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.19/", "title": "v1.8.19", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.18/", "title": "v1.8.18", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.17/", "title": "v1.8.17", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.16/", "title": "v1.8.16", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.15/", "title": "v1.8.15", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.14/", "title": "v1.8.14", "aliases": [] }, { "version": "https://kubeflow-pipelines.readthedocs.io/en/1.8.13/", "title": "v1.8.13", "aliases": [] } ]
471
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/build_docs_locally.sh
#!/bin/bash # Copyright 2023 Kubeflow Pipelines contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # install requirements pushd .. pip install -r docs/requirements.txt popd # build docs make clean html # serve docs pushd _build/html python3 -m http.server popd
472
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/docs/index.rst
Kubeflow Pipelines SDK API Reference ==================================== .. mdinclude:: ../sdk/python/README.md .. toctree:: :caption: Contents :hidden: Home <self> API Reference <source/kfp> Command Line Interface <source/cli> Usage Docs (kubeflow.org) <https://kubeflow.org/docs/pipelines/> Source Code <https://github.com/kubeflow/pipelines/>
473
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/components.rst
kfp.components ========================== .. automodule:: kfp.components
474
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/client.rst
kfp.client ========================== .. automodule:: kfp.client
475
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/local.rst
kfp.local ========================== .. automodule:: kfp.local
476
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/dsl.rst
kfp.dsl ========================== .. automodule:: kfp.dsl
477
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/registry.rst
kfp.registry ========================== .. automodule:: kfp.registry
478
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/compiler.rst
kfp.compiler ========================== .. glossary:: .. automodule:: kfp.compiler
479
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/cli.rst
Command Line Interface ========================== .. contents:: All commands :depth: 4 :backlinks: none .. click:: kfp.cli.cli:cli :prog: kfp :nested: full
480
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/source/kfp.rst
API Reference ========================== .. toctree:: :maxdepth: 1 dsl compiler components client registry local
481
0
kubeflow_public_repos/pipelines/docs
kubeflow_public_repos/pipelines/docs/_static/custom.css
[data-md-color-primary=kfpblue]{--md-primary-fg-color:#4279f4;--md-primary-fg-color--light:##4279f4;--md-primary-fg-color--dark:##4279f4;--md-primary-bg-color:#fff;--md-primary-bg-color--light:hsla(0,0%,100%,.7)}
482
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/attempt-register-vm-on-proxy.sh
#!/bin/bash # # Copyright 2019 The Kubeflow Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -ex DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" function run-proxy-agent { # Start the proxy process # https://github.com/google/inverting-proxy/blob/master/agent/Dockerfile # Connect proxy agent to Kubeflow Pipelines UI /opt/bin/proxy-forwarding-agent \ --debug=${DEBUG} \ --proxy=${PROXY_URL}/ \ --proxy-timeout=${PROXY_TIMEOUT} \ --backend=${BACKEND_ID} \ --host=${ML_PIPELINE_UI_SERVICE_HOST}:${ML_PIPELINE_UI_SERVICE_PORT} \ --shim-websockets=true \ --shim-path=websocket-shim \ --health-check-path=${HEALTH_CHECK_PATH} \ --health-check-interval-seconds=${HEALTH_CHECK_INTERVAL_SECONDS} \ --health-check-unhealthy-threshold=${HEALTH_CHECK_UNHEALTHY_THRESHOLD} } # Check if already has Hostname value. # It's possible the pod got restarted, in such case we continue use the existing # hostname. In proxy server side, it doesn't check VM name even pod got moved to # new VM. HOSTNAME=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.Hostname // empty") if [[ -n "${HOSTNAME}" ]]; then echo "Reuse existing hostname" PROXY_URL=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.ProxyUrl") BACKEND_ID=$(kubectl get configmap inverse-proxy-config -o json | jq -r ".data.BackendId") # If ConfigMap already exist, reuse the existing endpoint (a.k.a BACKEND_ID) and same ProxyUrl. run-proxy-agent exit 0 fi # Activate service account for gcloud SDK first if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" ]]; then gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" fi INSTANCE_ZONE="/"$(curl http://metadata.google.internal/computeMetadata/v1/instance/zone -H "Metadata-Flavor: Google") INSTANCE_ZONE="${INSTANCE_ZONE##/*/}" # Allow providing PROXY_URL from env, so we can specify staging inverse proxy url. if [[ -z "${PROXY_URL}" ]]; then # Get latest Proxy server URL wget https://storage.googleapis.com/ml-pipeline/proxy-agent-config.json PROXY_URL=$(python3 ${DIR}/get_proxy_url.py --config-file-path "proxy-agent-config.json" --location "${INSTANCE_ZONE}" --version "latest") fi if [[ -z "${PROXY_URL}" ]]; then echo "Proxy URL for the zone ${INSTANCE_ZONE} no found, exiting." exit 1 fi echo "Proxy URL: ${PROXY_URL}" # Register the proxy agent VM_ID=$(curl -H 'Metadata-Flavor: Google' "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/identity?format=full&audience=${PROXY_URL}/request-service-account-endpoint" 2>/dev/null) RESULT_JSON=$(curl -H "Authorization: Bearer $(gcloud auth print-access-token)" -H "X-Inverting-Proxy-VM-ID: ${VM_ID}" -d "" "${PROXY_URL}/request-service-account-endpoint" 2>/dev/null) echo "Response from the registration server: ${RESULT_JSON}" HOSTNAME=$(echo "${RESULT_JSON}" | jq -r ".hostname") BACKEND_ID=$(echo "${RESULT_JSON}" | jq -r ".backendID") echo "Hostname: ${HOSTNAME}" echo "Backend id: ${BACKEND_ID}" # Store the registration information in a ConfigMap PATCH_TEMP='{"data": {"Hostname":"'${HOSTNAME}'","ProxyUrl":"'${PROXY_URL}'","BackendId":"'${BACKEND_ID}'"}}' PATCH_JSON=$(printf "${PATCH_TEMP}" "${HOSTNAME}" "${PROXY_URL}" "${BACKEND_ID}") echo "PATCH_JSON: ${PATCH_JSON}" kubectl patch configmap/inverse-proxy-config \ --type merge \ --patch "${PATCH_JSON}" # Patch again as catched case that it actually didn't merge successfully but no error. # It looks like a bug of K8s master or configmap is not ready when run here. # Here sleep for 20 seconds and patch it again to mitigate the problem first. sleep 20 kubectl patch configmap/inverse-proxy-config \ --type merge \ --patch "${PATCH_JSON}" echo "Patched configmap/inverse-proxy-config" run-proxy-agent
483
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/requirements.txt
requests
484
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/OWNERS
approvers: - IronPan reviewers: - ojarjur
485
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/Dockerfile
# Copyright 2019 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Pin to a specific version of invert proxy agent FROM gcr.io/inverting-proxy/agent@sha256:694d6c1bf299585b530c923c3728cd2c45083f3b396ec83ff799cef1c9dc7474 # We need --allow-releaseinfo-change, because of https://github.com/kubeflow/pipelines/issues/6311#issuecomment-899224137. RUN apt update --allow-releaseinfo-change && apt-get install -y curl jq python3-pip COPY requirements.txt . RUN python3 -m pip install -r \ requirements.txt --quiet --no-cache-dir \ && rm -f requirements.txt # Install gcloud SDK RUN curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz RUN mkdir -p /usr/local/gcloud RUN tar -C /usr/local/gcloud -xf /tmp/google-cloud-sdk.tar.gz RUN /usr/local/gcloud/google-cloud-sdk/install.sh ENV PATH $PATH:/usr/local/gcloud/google-cloud-sdk/bin # Install kubectl RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl RUN chmod +x ./kubectl RUN mv kubectl /usr/local/bin/ ADD ./ /opt/proxy CMD ["/bin/sh", "-c", "/opt/proxy/attempt-register-vm-on-proxy.sh"]
486
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/get_proxy_url_test.py
#!/usr/bin/env python3 # Copyright 2019 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import unittest from get_proxy_url import urls_for_zone url_map_json = """ { "us": ["https://datalab-us-west1.cloud.google.com"], "us-west1": ["https://datalab-us-west1.cloud.google.com"], "us-west2": ["https://datalab-us-west2.cloud.google.com"], "us-east1": ["https://datalab-us-east1.cloud.google.com"] } """ class TestUrlsForZone(unittest.TestCase): def test_get_urls(self): self.assertEqual([ "https://datalab-us-east1.cloud.google.com", "https://datalab-us-west1.cloud.google.com" ], urls_for_zone("us-east1-a", json.loads(url_map_json))) def test_get_urls_no_match(self): self.assertEqual([], urls_for_zone( "euro-west1-a", json.loads(url_map_json) )) def test_get_urls_incorrect_format(self): with self.assertRaises(ValueError): urls_for_zone("weird-format-a", json.loads(url_map_json)) def test_get_urls_priority(self): self.assertEqual([ "https://datalab-us-west1.cloud.google.com", "https://datalab-us-west2.cloud.google.com" ], urls_for_zone("us-west1-a", json.loads(url_map_json))) if __name__ == '__main__': unittest.main()
487
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/proxy/get_proxy_url.py
#!/usr/bin/env python3 # Copyright 2019 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """CLI tool that returns URL of the proxy for particular zone and version.""" import argparse import json import logging import re import requests try: unicode except NameError: unicode = str def urls_for_zone(zone, location_to_urls_map): """Returns list of potential proxy URLs for a given zone. Returns: List of possible URLs, in order of proximity. Args: zone: GCP zone location_to_urls_map: Maps region/country/continent to list of URLs, e.g.: { "us-west1" : [ us-west1-url ], "us-east1" : [ us-east1-url ], "us" : [ us-west1-url ], ... } """ zone_match = re.match("((([a-z]+)-[a-z]+)\d+)-[a-z]", zone) if not zone_match: raise ValueError("Incorrect zone specified: {}".format(zone)) # e.g. zone = us-west1-b region = zone_match.group(1) # us-west1 approx_region = zone_match.group(2) # us-west country = zone_match.group(3) # us urls = [] if region in location_to_urls_map: urls.extend([ url for url in location_to_urls_map[region] if url not in urls ]) region_regex = re.compile("([a-z]+-[a-z]+)\d+") for location in location_to_urls_map: region_match = region_regex.match(location) if region_match and region_match.group(1) == approx_region: urls.extend([ url for url in location_to_urls_map[location] if url not in urls ]) if country in location_to_urls_map: urls.extend([ url for url in location_to_urls_map[country] if url not in urls ]) return urls def main(): parser = argparse.ArgumentParser(description="Get proxy URL") parser.add_argument("--config-file-path", required=True, type=str) parser.add_argument("--location", required=True, type=str) parser.add_argument("--version", required=True, type=str) args = parser.parse_args() with open(args.config_file_path, "r") as config_file: data = json.loads(config_file.read()) agent_containers_config = data["agent-docker-containers"] version = args.version if version not in agent_containers_config: version = "latest" if version not in agent_containers_config: raise ValueError("Version latest not found in the config file.") container_config = agent_containers_config[version] regional_urls = container_config["proxy-urls"] location = args.location urls = urls_for_zone(location, regional_urls) if not urls: raise ValueError("No valid URLs found for zone: {}".format(location)) for url in urls: try: status_code = requests.head(url).status_code except requests.ConnectionError: pass expected_codes = frozenset([307]) # 307 - Temporary Redirect, Proxy server sends this if VM has access rights. if status_code in expected_codes: logging.debug("Status code from the url %s", status_code) print(url) exit(0) logging.debug( "Incorrect status_code from the server: %s. Expected: %s", status_code, expected_codes ) raise ValueError("No working URL found") if __name__ == '__main__': main()
488
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/components/README.md
# Components for Kubeflow Pipelines Kubeflow Pipelines system orchestrates pipeline graphs of containerized command-line programs. Components are the building blocks of pipelines. Component definition files describe component interface (inputs and outputs) and implementation (how to call the containerized program). Users can load components with KFP SDK, instantiate the components and compose them in a pipeline graph.
489
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/components/build_image.sh
#!/bin/bash -e # Copyright 2018 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. while getopts ":hp:t:i:l:" opt; do case "${opt}" in h) echo "-p: project name" echo "-t: tag name" echo "-i: image name. If provided, project name and tag name are not necessary" echo "-l: local image name." exit ;; p) PROJECT_ID=${OPTARG} ;; t) TAG_NAME=${OPTARG} ;; i) IMAGE_NAME=${OPTARG} ;; l) LOCAL_IMAGE_NAME=${OPTARG} ;; \? ) echo "Usage: cmd [-p] project [-t] tag [-i] image [-l] local image" exit ;; esac done if [ -z "${PROJECT_ID}" ]; then PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") fi if [ -z "${TAG_NAME}" ]; then TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) fi if [ -z "${IMAGE_NAME}" ]; then docker pull gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:latest || true fi docker build -t ${LOCAL_IMAGE_NAME} . --cache-from gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:latest if [ -z "${IMAGE_NAME}" ]; then docker tag ${LOCAL_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:${TAG_NAME} docker tag ${LOCAL_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:latest docker push gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:${TAG_NAME} docker push gcr.io/${PROJECT_ID}/${LOCAL_IMAGE_NAME}:latest else docker tag ${LOCAL_IMAGE_NAME} "${IMAGE_NAME}" docker push "${IMAGE_NAME}" fi
490
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/components/test_load_all_components.sh
#!/bin/bash -e # # Copyright 2019 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This script automated the process to release the component images. # To run it, find a good release candidate commit SHA from ml-pipeline-staging project, # and provide a full github COMMIT SHA to the script. E.g. # ./release.sh 2118baf752d3d30a8e43141165e13573b20d85b8 # The script copies the images from staging to prod, and update the local code. # You can then send a PR using your local branch. cd "$(dirname "$0")" PYTHONPATH="$PYTHONPATH:../sdk/python" echo "Testing loading all components" find . -name component.yaml | python3 -c ' import sys import kfp # These components use v1 graph syntax which is not supported in v2 yet. SKIP_COMPONENT_FILES = [ "./contrib/XGBoost/Cross_validation_for_regression/from_CSV/component.yaml", "./contrib/XGBoost/Train_regression_and_calculate_metrics/from_CSV/component.yaml", "./contrib/XGBoost/Train_and_cross-validate_regression/from_CSV/component.yaml", # TODO: This component uses invalid placeholders. Updated when migrating GCPC to v2. "./google-cloud/google_cloud_pipeline_components/aiplatform/batch_predict_job/component.yaml", "./google-cloud/google_cloud_pipeline_components/v1/batch_predict_job/component.yaml" ] for component_file in sys.stdin: component_file = component_file.rstrip("\n") print(component_file) if component_file in SKIP_COMPONENT_FILES: continue kfp.components.load_component_from_file(component_file) '
491
0
kubeflow_public_repos/pipelines
kubeflow_public_repos/pipelines/components/OWNERS
approvers: - IronPan - neuromage - SinaChavoshi reviewers: - IronPan - neuromage - SinaChavoshi - animeshsingh
492
0
kubeflow_public_repos/pipelines/components
kubeflow_public_repos/pipelines/components/kserve/README.md
# KServe Component Organization: KServe Organization Description: KServe is a highly scalable and standards based Model Inference Platform on Kubernetes for Trusted AI Version information: KServe 0.12.0. Works for Kubeflow 1.9 **Note:** To use the KServe 0.7.0 version of this component which runs on Kubeflow 1.5, then change the load_component_from_url in the usage section with the following YAML instead: ``` https://raw.githubusercontent.com/kubeflow/pipelines/1.8.1/components/kserve/component.yaml ``` Test status: Currently manual tests Owners information: - Tommy Li (Tomcli) - IBM, [email protected] - Yi-Hong Wang (yhwang) - IBM, [email protected] ## Usage Load the component with: ```python import kfp.dsl as dsl import kfp from kfp import components kserve_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kserve/component.yaml') ``` ### Arguments | Argument | Default | Description | |----------|---------|-------------| | action | `create` | Action to execute on KServe. Available options are `create`, `update`, `apply`, and `delete`. Note: `apply` is equivalent to `update` if the resource exists and `create` if not. | | model_name | | Name to give to the deployed model/InferenceService | | model_uri | | Path of the S3 or GCS compatible directory containing the model. | | canary_traffic_percent | `100` | The traffic split percentage between the candidate model and the last ready model | | namespace | | Kubernetes namespace where the KServe service is deployed. If no namespace is provided, `anonymous` will be used unless a namespace is provided in the `inferenceservice_yaml` argument. | | framework | | Machine learning framework for model serving. Currently the supported frameworks are `tensorflow`, `pytorch`, `sklearn`, `xgboost`, `onnx`, `triton`, `pmml`, and `lightgbm`. | | runtime_version | `latest` | Runtime Version of Machine Learning Framework | | resource_requests | `{"cpu": "0.5", "memory": "512Mi"}` | CPU and Memory requests for Model Serving | | resource_limits | `{"cpu": "1", "memory": "1Gi"}` | CPU and Memory limits for Model Serving | | custom_model_spec | `{}` | Custom model runtime container spec in JSON. Sample spec: `{"image": "codait/max-object-detector", "port":5000, "name": "test-container"}` | | inferenceservice_yaml | `{}` | Raw InferenceService serialized YAML for deployment. Use this if you need additional configurations for your InferenceService. | | autoscaling_target | `0` | Autoscaling Target Number. If not 0, sets the following annotation on the InferenceService: `autoscaling.knative.dev/target` | | service_account | | ServiceAccount to use to run the InferenceService pod. | | enable_istio_sidecar | `True` | Whether to enable istio sidecar injection. | | watch_timeouot | `300` | Timeout in seconds for watching until the InferenceService becomes ready. | | min_replicas | `-1` | Minimum number of InferenceService replicas. Default of -1 just delegates to pod default of 1. | | max_replicas | `-1` | Maximum number of InferenceService replicas. | ### Basic InferenceService Creation The following will use the KServe component to deploy a TensorFlow model. ```python @dsl.pipeline( name='KServe Pipeline', description='A pipeline for KServe.' ) def kserve_pipeline(): kserve_op( action='apply', model_name='tf-sample', model_uri='gs://kfserving-examples/models/tensorflow/flowers', framework='tensorflow', ) kfp.Client().create_run_from_pipeline_func(kserve_pipeline, arguments={}) ``` Sample op for deploying a PyTorch model: ```python kserve_op( action='apply', model_name='pytorch-test', model_uri='gs://kfserving-examples/models/torchserve/image_classifier', framework='pytorch' ) ``` ### Canary Rollout Ensure you have an initial model deployed with 100 percent traffic with something like: ```python kserve_op( action = 'apply', model_name='tf-sample', model_uri='gs://kfserving-examples/models/tensorflow/flowers', framework='tensorflow', ) ``` Deploy the candidate model which will only get a portion of traffic: ```python kserve_op( action='apply', model_name='tf-sample', model_uri='gs://kfserving-examples/models/tensorflow/flowers-2', framework='tensorflow', canary_traffic_percent='10' ) ``` To promote the candidate model, you can either set `canary_traffic_percent` to `100` or simply remove it, then re-run the pipeline: ```python kserve_op( action='apply', model_name='tf-sample', model_uri='gs://kfserving-examples/models/tensorflow/flowers-2', framework='tensorflow' ) ``` If you instead want to rollback the candidate model, then set `canary_traffic_percent` to `0`, then re-run the pipeline: ```python kserve_op( action='apply', model_name='tf-sample', model_uri='gs://kfserving-examples/models/tensorflow/flowers-2', framework='tensorflow', canary_traffic_percent='0' ) ``` ### Deletion To delete a model, simply set the `action` to `'delete'` and pass in the InferenceService name: ```python kserve_op( action='delete', model_name='tf-sample' ) ``` ### Custom Runtime To pass in a custom model serving runtime, you can use the `custom_model_spec` argument. Currently, the expected format for `custom_model_spec` coincides with: ```json { "image": "some_image", "port": "port_number", "name": "custom-container", "env" : [{ "name": "some_name", "value": "some_value"}], "resources": { "requests": {}, "limits": {}} } ``` Sample deployment: ```python container_spec = '{ "image": "codait/max-object-detector", "port":5000, "name": "custom-container"}' kserve_op( action='apply', model_name='custom-simple', custom_model_spec=container_spec ) ``` ### Deploy using InferenceService YAML If you need more fine-grained configuration, there is the option to deploy using an InferenceService YAML file: ```python isvc_yaml = ''' apiVersion: "serving.kserve.io/v1beta1" kind: "InferenceService" metadata: name: "sklearn-iris" namespace: "anonymous" spec: predictor: sklearn: storageUri: "gs://kfserving-examples/models/sklearn/iris" ''' kserve_op( action='apply', inferenceservice_yaml=isvc_yaml ) ```
493
0
kubeflow_public_repos/pipelines/components
kubeflow_public_repos/pipelines/components/kserve/requirements.txt
kserve==0.12.0 protobuf~=3.19.0
494
0
kubeflow_public_repos/pipelines/components
kubeflow_public_repos/pipelines/components/kserve/component.yaml
name: Serve a model with KServe description: Serve Models using KServe inputs: - {name: Action, type: String, default: 'create', description: 'Action to execute on KServe'} - {name: Model Name, type: String, default: '', description: 'Name to give to the deployed model'} - {name: Model URI, type: String, default: '', description: 'Path of the S3 or GCS compatible directory containing the model.'} - {name: Canary Traffic Percent, type: String, default: '100', description: 'The traffic split percentage between the candidate model and the last ready model'} - {name: Namespace, type: String, default: '', description: 'Kubernetes namespace where the KServe service is deployed.'} - {name: Framework, type: String, default: '', description: 'Machine Learning Framework for Model Serving.'} - {name: Runtime Version, type: String, default: 'latest', description: 'Runtime Version of Machine Learning Framework'} - {name: Resource Requests, type: String, default: '{"cpu": "0.5", "memory": "512Mi"}', description: 'CPU and Memory requests for Model Serving'} - {name: Resource Limits, type: String, default: '{"cpu": "1", "memory": "1Gi"}', description: 'CPU and Memory limits for Model Serving'} - {name: Custom Model Spec, type: String, default: '{}', description: 'Custom model runtime container spec in JSON'} - {name: Autoscaling Target, type: String, default: '0', description: 'Autoscaling Target Number'} - {name: Service Account, type: String, default: '', description: 'ServiceAccount to use to run the InferenceService pod'} - {name: Enable Istio Sidecar, type: Bool, default: 'True', description: 'Whether to enable istio sidecar injection'} - {name: InferenceService YAML, type: String, default: '{}', description: 'Raw InferenceService serialized YAML for deployment'} - {name: Watch Timeout, type: String, default: '300', description: "Timeout seconds for watching until InferenceService becomes ready."} - {name: Min Replicas, type: String, default: '-1', description: 'Minimum number of InferenceService replicas'} - {name: Max Replicas, type: String, default: '-1', description: 'Maximum number of InferenceService replicas'} - {name: Request Timeout, type: String, default: '60', description: "Specifies the number of seconds to wait before timing out a request to the component."} - {name: Enable ISVC Status, type: Bool, default: 'True', description: "Specifies whether to store the inference service status as the output parameter"} outputs: - {name: InferenceService Status, type: String, description: 'Status JSON output of InferenceService'} implementation: container: image: quay.io/aipipeline/kserve-component:v0.12.0 command: ['python'] args: [ -u, kservedeployer.py, --action, {inputValue: Action}, --model-name, {inputValue: Model Name}, --model-uri, {inputValue: Model URI}, --canary-traffic-percent, {inputValue: Canary Traffic Percent}, --namespace, {inputValue: Namespace}, --framework, {inputValue: Framework}, --runtime-version, {inputValue: Runtime Version}, --resource-requests, {inputValue: Resource Requests}, --resource-limits, {inputValue: Resource Limits}, --custom-model-spec, {inputValue: Custom Model Spec}, --autoscaling-target, {inputValue: Autoscaling Target}, --service-account, {inputValue: Service Account}, --enable-istio-sidecar, {inputValue: Enable Istio Sidecar}, --output-path, {outputPath: InferenceService Status}, --inferenceservice-yaml, {inputValue: InferenceService YAML}, --watch-timeout, {inputValue: Watch Timeout}, --min-replicas, {inputValue: Min Replicas}, --max-replicas, {inputValue: Max Replicas}, --request-timeout, {inputValue: Request Timeout}, --enable-isvc-status, {inputValue: Enable ISVC Status} ]
495
0
kubeflow_public_repos/pipelines/components
kubeflow_public_repos/pipelines/components/kserve/OWNERS
approvers: - Tomcli - yhwang reviewers: - Tomcli - yhwang
496
0
kubeflow_public_repos/pipelines/components
kubeflow_public_repos/pipelines/components/kserve/Dockerfile
FROM python:3.9-slim-bullseye RUN apt-get update && apt-get install -y gcc python3-dev COPY requirements.txt . RUN python3 -m pip install -r \ requirements.txt --quiet --no-cache-dir \ && rm -f requirements.txt ENV APP_HOME /app COPY src $APP_HOME WORKDIR $APP_HOME ENTRYPOINT ["python"] CMD ["kservedeployer.py"]
497
0
kubeflow_public_repos/pipelines/components/kserve
kubeflow_public_repos/pipelines/components/kserve/src/kservedeployer.py
# Copyright 2019 kubeflow.org. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse from distutils.util import strtobool import json import os import sys import time import yaml from kubernetes import client from kubernetes.client.models import V1ResourceRequirements from kserve import constants from kserve import KServeClient from kserve import V1beta1InferenceService from kserve import V1beta1InferenceServiceSpec from kserve import V1beta1LightGBMSpec from kserve import V1beta1ONNXRuntimeSpec from kserve import V1beta1PMMLSpec from kserve import V1beta1PredictorSpec from kserve import V1beta1SKLearnSpec from kserve import V1beta1TFServingSpec from kserve import V1beta1TorchServeSpec from kserve import V1beta1TritonSpec from kserve import V1beta1XGBoostSpec from kserve.api.watch import isvc_watch AVAILABLE_FRAMEWORKS = { 'tensorflow': V1beta1TFServingSpec, 'pytorch': V1beta1TorchServeSpec, 'sklearn': V1beta1SKLearnSpec, 'xgboost': V1beta1XGBoostSpec, 'onnx': V1beta1ONNXRuntimeSpec, 'triton': V1beta1TritonSpec, 'pmml': V1beta1PMMLSpec, 'lightgbm': V1beta1LightGBMSpec } def create_predictor_spec(framework, runtime_version, resource_requests, resource_limits, storage_uri, canary_traffic_percent, service_account, min_replicas, max_replicas, containers, request_timeout): """ Create and return V1beta1PredictorSpec to be used in a V1beta1InferenceServiceSpec object. """ predictor_spec = V1beta1PredictorSpec( service_account_name=service_account, min_replicas=(min_replicas if min_replicas >= 0 else None ), max_replicas=(max_replicas if max_replicas > 0 and max_replicas >= min_replicas else None ), containers=(containers or None), canary_traffic_percent=canary_traffic_percent, timeout=request_timeout ) # If the containers field was set, then this is custom model serving. if containers: return predictor_spec if framework not in AVAILABLE_FRAMEWORKS: raise ValueError("Error: No matching framework: " + framework) setattr( predictor_spec, framework, AVAILABLE_FRAMEWORKS[framework]( storage_uri=storage_uri, resources=V1ResourceRequirements( requests=resource_requests, limits=resource_limits ), runtime_version=runtime_version ) ) return predictor_spec def create_custom_container_spec(custom_model_spec): """ Given a JSON container spec, return a V1Container object representing the container. This is used for passing in custom server images. The expected format for the input is: { "image": "test/containerimage", "port":5000, "name": "custom-container" } """ env = ( [ client.V1EnvVar(name=i["name"], value=i["value"]) for i in custom_model_spec["env"] ] if custom_model_spec.get("env", "") else None ) ports = ( [client.V1ContainerPort(container_port=int(custom_model_spec.get("port", "")), protocol="TCP")] if custom_model_spec.get("port", "") else None ) resources = ( client.V1ResourceRequirements( requests=(custom_model_spec["resources"]["requests"] if custom_model_spec.get('resources', {}).get('requests') else None ), limits=(custom_model_spec["resources"]["limits"] if custom_model_spec.get('resources', {}).get('limits') else None ), ) if custom_model_spec.get("resources", {}) else None ) return client.V1Container( name=custom_model_spec.get("name", "custom-container"), image=custom_model_spec["image"], env=env, ports=ports, command=custom_model_spec.get("command", None), args=custom_model_spec.get("args", None), image_pull_policy=custom_model_spec.get("image_pull_policy", None), working_dir=custom_model_spec.get("working_dir", None), resources=resources ) def create_inference_service(metadata, predictor_spec): """ Build and return V1beta1InferenceService object. """ return V1beta1InferenceService( api_version=constants.KSERVE_V1BETA1, kind=constants.KSERVE_KIND, metadata=metadata, spec=V1beta1InferenceServiceSpec( predictor=predictor_spec ), ) def submit_api_request(kserve_client, action, name, isvc, namespace=None, watch=False, timeout_seconds=300): """ Creates or updates a Kubernetes custom object. This code is borrowed from the KServeClient.create/patch methods as using those directly doesn't allow for sending in dicts as the InferenceService object which is needed for supporting passing in raw InferenceService serialized YAML. """ custom_obj_api = kserve_client.api_instance args = [constants.KSERVE_GROUP, constants.KSERVE_V1BETA1_VERSION, namespace, constants.KSERVE_PLURAL] if action == 'update': outputs = custom_obj_api.patch_namespaced_custom_object(*args, name, isvc) else: outputs = custom_obj_api.create_namespaced_custom_object(*args, isvc) if watch: # Sleep 3 to avoid status still be True within a very short time. time.sleep(3) isvc_watch( name=outputs['metadata']['name'], namespace=namespace, timeout_seconds=timeout_seconds) else: return outputs def perform_action(action, model_name, model_uri, canary_traffic_percent, namespace, framework, runtime_version, resource_requests, resource_limits, custom_model_spec, service_account, inferenceservice_yaml, request_timeout, autoscaling_target=0, enable_istio_sidecar=True, watch_timeout=300, min_replicas=0, max_replicas=0): """ Perform the specified action. If the action is not 'delete' and `inferenceService_yaml` was provided, the dict representation of the YAML will be sent directly to the Kubernetes API. Otherwise, a V1beta1InferenceService object will be built using the provided input and then sent for creation/update. :return InferenceService JSON output """ kserve_client = KServeClient() if inferenceservice_yaml: # Overwrite name and namespace if exists if namespace: inferenceservice_yaml['metadata']['namespace'] = namespace if model_name: inferenceservice_yaml['metadata']['name'] = model_name else: model_name = inferenceservice_yaml['metadata']['name'] isvc = inferenceservice_yaml elif action != 'delete': # Create annotations annotations = {} if int(autoscaling_target) != 0: annotations["autoscaling.knative.dev/target"] = str(autoscaling_target) if not enable_istio_sidecar: annotations["sidecar.istio.io/inject"] = 'false' if not annotations: annotations = None metadata = client.V1ObjectMeta( name=model_name, namespace=namespace, annotations=annotations ) # If a custom model container spec was provided, build the V1Container # object using it. containers = [] if custom_model_spec: containers = [create_custom_container_spec(custom_model_spec)] # Build the V1beta1PredictorSpec. predictor_spec = create_predictor_spec( framework, runtime_version, resource_requests, resource_limits, model_uri, canary_traffic_percent, service_account, min_replicas, max_replicas, containers, request_timeout ) isvc = create_inference_service(metadata, predictor_spec) if action == "create": submit_api_request(kserve_client, 'create', model_name, isvc, namespace, watch=True, timeout_seconds=watch_timeout) elif action == "update": submit_api_request(kserve_client, 'update', model_name, isvc, namespace, watch=True, timeout_seconds=watch_timeout) elif action == "apply": try: submit_api_request(kserve_client, 'create', model_name, isvc, namespace, watch=True, timeout_seconds=watch_timeout) except Exception: submit_api_request(kserve_client, 'update', model_name, isvc, namespace, watch=True, timeout_seconds=watch_timeout) elif action == "delete": kserve_client.delete(model_name, namespace=namespace) else: raise ("Error: No matching action: " + action) model_status = kserve_client.get(model_name, namespace=namespace) return model_status def main(): """ This parses arguments passed in from the CLI and performs the corresponding action. """ parser = argparse.ArgumentParser() parser.add_argument( "--action", type=str, help="Action to execute on KServe", default="create" ) parser.add_argument( "--model-name", type=str, help="Name to give to the deployed model" ) parser.add_argument( "--model-uri", type=str, help="Path of the S3, GCS or PVC directory containing the model", ) parser.add_argument( "--canary-traffic-percent", type=str, help="The traffic split percentage between the candidate model and the last ready model", default="100", ) parser.add_argument( "--namespace", type=str, help="Kubernetes namespace where the KServe service is deployed", default="", ) parser.add_argument( "--framework", type=str, help="Model serving framework to use. Available frameworks: " + str(list(AVAILABLE_FRAMEWORKS.keys())), default="" ) parser.add_argument( "--runtime-version", type=str, help="Runtime Version of Machine Learning Framework", default="latest" ) parser.add_argument( "--resource-requests", type=json.loads, help="CPU and Memory requests for Model Serving", default='{"cpu": "0.5", "memory": "512Mi"}', ) parser.add_argument( "--resource-limits", type=json.loads, help="CPU and Memory limits for Model Serving", default='{"cpu": "1", "memory": "1Gi"}', ) parser.add_argument( "--custom-model-spec", type=json.loads, help="The container spec for a custom model runtime", default="{}", ) parser.add_argument( "--autoscaling-target", type=str, help="Autoscaling target number", default="0" ) parser.add_argument( "--service-account", type=str, help="Service account containing s3 credentials", default="", ) parser.add_argument( "--enable-istio-sidecar", type=strtobool, help="Whether to inject istio sidecar", default="True" ) parser.add_argument( "--inferenceservice-yaml", type=yaml.safe_load, help="Raw InferenceService serialized YAML for deployment", default="{}" ) parser.add_argument("--output-path", type=str, help="Path to store URI output") parser.add_argument("--watch-timeout", type=str, help="Timeout seconds for watching until InferenceService becomes ready.", default="300") parser.add_argument( "--min-replicas", type=str, help="Minimum number of replicas", default="-1" ) parser.add_argument( "--max-replicas", type=str, help="Maximum number of replicas", default="-1" ) parser.add_argument("--request-timeout", type=str, help="Specifies the number of seconds to wait before timing out a request to the component.", default="60") parser.add_argument("--enable-isvc-status", type=strtobool, help="Specifies whether to store the inference service status as the output parameter", default="True") args = parser.parse_args() action = args.action.lower() model_name = args.model_name model_uri = args.model_uri canary_traffic_percent = int(args.canary_traffic_percent) namespace = args.namespace framework = args.framework.lower() runtime_version = args.runtime_version.lower() resource_requests = args.resource_requests resource_limits = args.resource_limits output_path = args.output_path custom_model_spec = args.custom_model_spec autoscaling_target = int(args.autoscaling_target) service_account = args.service_account enable_istio_sidecar = args.enable_istio_sidecar inferenceservice_yaml = args.inferenceservice_yaml watch_timeout = int(args.watch_timeout) min_replicas = int(args.min_replicas) max_replicas = int(args.max_replicas) request_timeout = int(args.request_timeout) enable_isvc_status = args.enable_isvc_status # Default the namespace. if not namespace: namespace = 'anonymous' # If no namespace was provided, but one is listed in the YAML, use that. if inferenceservice_yaml and inferenceservice_yaml.get('metadata', {}).get('namespace'): namespace = inferenceservice_yaml['metadata']['namespace'] # Only require model name when an Isvc YAML was not provided. if not inferenceservice_yaml and not model_name: parser.error('{} argument is required when performing "{}" action'.format( 'model_name', action )) # If the action isn't a delete, require 'model-uri' and 'framework' only if an Isvc YAML # or custom model container spec are not provided. if action != 'delete': if not inferenceservice_yaml and not custom_model_spec and not (model_uri and framework): parser.error('Arguments for {} and {} are required when performing "{}" action'.format( 'model_uri', 'framework', action )) model_status = perform_action( action=action, model_name=model_name, model_uri=model_uri, canary_traffic_percent=canary_traffic_percent, namespace=namespace, framework=framework, runtime_version=runtime_version, resource_requests=resource_requests, resource_limits=resource_limits, custom_model_spec=custom_model_spec, autoscaling_target=autoscaling_target, service_account=service_account, enable_istio_sidecar=enable_istio_sidecar, inferenceservice_yaml=inferenceservice_yaml, request_timeout=request_timeout, watch_timeout=watch_timeout, min_replicas=min_replicas, max_replicas=max_replicas ) print(model_status) if action != 'delete': # Check whether the model is ready for condition in model_status["status"]["conditions"]: if condition['type'] == 'Ready': if condition['status'] == 'True': print('Model is ready\n') break print('Model is timed out, please check the InferenceService events for more details.') sys.exit(1) try: print(model_status["status"]["url"] + " is the Knative domain.") print("Sample test commands: \n") # model_status['status']['url'] is like http://flowers-sample.kubeflow.example.com/v1/models/flowers-sample print("curl -v -X GET %s" % model_status["status"]["url"]) print("\nIf the above URL is not accessible, it's recommended to setup Knative with a configured DNS.\n" "https://knative.dev/docs/install/installing-istio/#configuring-dns") except Exception: print("Model is not ready, check the logs for the Knative URL status.") sys.exit(1) if output_path: if not enable_isvc_status: model_status = {} else: try: # Remove some less needed fields to reduce output size. del model_status['metadata']['managedFields'] del model_status['status']['conditions'] if sys.getsizeof(model_status) > 3000: del model_status['components']['predictor']['address']['url'] del model_status['components']['predictor']['latestCreatedRevision'] del model_status['components']['predictor']['latestReadyRevision'] del model_status['components']['predictor']['latestRolledoutRevision'] del model_status['components']['predictor']['url'] del model_status['spec'] except KeyError: pass if not os.path.exists(os.path.dirname(output_path)): os.makedirs(os.path.dirname(output_path)) with open(output_path, "w") as report: report.write(json.dumps(model_status, indent=4)) if __name__ == "__main__": main()
498
0
kubeflow_public_repos/pipelines/components/kubeflow
kubeflow_public_repos/pipelines/components/kubeflow/deployer/component.yaml
name: Kubeflow - Serve TF model description: Serve TensorFlow model using Kubeflow TF-serving inputs: - {name: Model dir, type: GCSPath, description: 'Path of GCS directory containing exported Tensorflow model.'} # type: {GCSPath: {path_type: Directory}} - {name: Cluster name, type: String, default: '', description: 'Kubernetes cluster name where the TS-serving service should be deployed. Uses the current cluster by default.'} - {name: Namespace, type: String, default: 'kubeflow', description: 'Kubernetes namespace where the TS-serving service should be deployed.'} - {name: Server name, type: String, default: 'model-server', description: 'TF-serving server name to use when deploying.'} - {name: PVC name, type: String, default: '' , description: 'Optional PersistentVolumeClaim to use.'} - {name: Service type, type: String, default: 'ClusterIP' , description: 'Optional Service type to use, two options: "ClusterIP" (default if not set) and "NodePort".'} #outputs: # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'} implementation: container: image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:1.8.0-alpha.0 command: [/bin/deploy.sh] args: [ --model-export-path, {inputValue: Model dir}, --cluster-name, {inputValue: Cluster name}, --namespace, {inputValue: Namespace}, --server-name, {inputValue: Server name}, --pvc-name, {inputValue: PVC name}, --service-type, {inputValue: Service type}, ]
499